Update the logging configuration and adapt the application to the new Spring version.

This commit is contained in:
Fabian Märki
2016-03-17 09:02:51 +01:00
parent 652f900e86
commit 9fe84bfe5a
8 changed files with 64 additions and 42 deletions

View File

@@ -1,7 +1,9 @@
package ch.psi.daq.queryrest;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.cassandra.CassandraDataAutoConfiguration;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.context.web.SpringBootServletInitializer;
@@ -10,6 +12,9 @@ import org.springframework.boot.context.web.SpringBootServletInitializer;
* wires all the @RestController annotated classes.
*/
@SpringBootApplication
@EnableAutoConfiguration(exclude={
CassandraDataAutoConfiguration.class
})
public class QueryRestApplication extends SpringBootServletInitializer {
public static void main(final String[] args) {

View File

@@ -5,6 +5,7 @@ appender reference in the root level section below.
<configuration scan="true">
<jmxConfigurator />
<shutdownHook class="ch.qos.logback.core.hook.DelayingShutdownHook"/>
<!-- DEBUGLOG rolling file appender to debug.log (all levels) -->

View File

@@ -59,7 +59,7 @@ public class DaqWebMvcConfig extends WebMvcConfigurationSupport {
@Bean
@Lazy
public CassandraTestAdmin cassandraTestAdmin() {
public CassandraTestAdmin testAdmin() {
return new CassandraTestAdminImpl();
}

View File

@@ -50,7 +50,7 @@ import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
public class QueryRestControllerCsvTest extends AbstractDaqRestTest {
@Resource
private CassandraTestAdmin cassandraTestAdmin;
private CassandraTestAdmin testAdmin;
@Resource
private CassandraDataGen dataGen;

View File

@@ -37,7 +37,7 @@ import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
@Resource
private CassandraTestAdmin cassandraTestAdmin;
private CassandraTestAdmin testAdmin;
@Resource
private CassandraDataGen dataGen;

View File

@@ -8,18 +8,23 @@ import java.util.regex.Pattern;
import java.util.stream.LongStream;
import java.util.stream.Stream;
import org.apache.commons.lang3.ArrayUtils;
import com.google.common.collect.Lists;
import ch.psi.daq.common.ordering.Ordering;
import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.cassandra.ChannelEvent;
import ch.psi.daq.domain.cassandra.FieldNames;
import ch.psi.daq.domain.cassandra.utils.TablePropertiesUtils;
import ch.psi.daq.domain.reader.Backend;
import ch.psi.daq.domain.reader.DataReader;
public class DummyArchiverApplianceReader implements DataReader {
public static final String ARCHIVER_TEST_CHANNEL = "ArchiverTestChannel_";
private static final int KEYSPACE = 1;
public static final String TEST_CHANNEL_1 = "ArchiverChannel_1";
public static final String TEST_CHANNEL_2 = "ArchiverChannel_2";
private List<String> channels = Lists.newArrayList(TEST_CHANNEL_1, TEST_CHANNEL_2);
@@ -49,30 +54,33 @@ public class DummyArchiverApplianceReader implements DataReader {
@Override
public Stream<? extends DataEvent> getEventStream(String channel, long startPulseId, long endPulseId,
Ordering ordering, boolean aggregateValues, String... columns) {
return getElements(channel, startPulseId, endPulseId);
return getDummyEventStream(channel, startPulseId, endPulseId, columns);
}
@Override
public Stream<? extends DataEvent> getEventStream(String channel, BigDecimal startTime, BigDecimal endTime,
Ordering ordering, boolean aggregateValues, String... columns) {
return getElements(channel, TimeUtils.getMillis(startTime) / 10, TimeUtils.getMillis(endTime) / 10);
return getDummyEventStream(channel, TimeUtils.getMillis(startTime) / 10, TimeUtils.getMillis(endTime) / 10);
}
protected Stream<? extends DataEvent> getElements(String channel, long start, long end) {
String channelLower = channel.toLowerCase();
private Stream<? extends DataEvent> getDummyEventStream(String channelParam, long startIndex, long endIndex, String... columns) {
String channelLower = channelParam.toLowerCase();
String channel = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_CHANNEL)) ? channelParam : null;
Stream<? extends DataEvent> eventStream = LongStream.rangeClosed(start, end).mapToObj(i -> {
BigDecimal time = TimeUtils.getTimeFromMillis(i * 10, 0);
Stream<? extends DataEvent> eventStream = LongStream.rangeClosed(startIndex, endIndex).mapToObj(i -> {
BigDecimal iocTime = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_IOC_TIME)) ? TimeUtils.getTimeFromMillis(i * 10, 0) : TablePropertiesUtils.DEFAULT_VALUE_DECIMAL;
BigDecimal globalTime = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_GLOBAL_TIME)) ? TimeUtils.getTimeFromMillis(i * 10, 0) : TablePropertiesUtils.DEFAULT_VALUE_DECIMAL;
long pulseId = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_PULSE_ID)) ? i : TablePropertiesUtils.DEFAULT_VALUE_BIGINT_PRIMITIVE;
if (channelLower.contains("waveform")) {
long[] value = random.longs(2048).toArray();
value[0] = i;
return new ChannelEvent(
channel,
time,
i,
time,
1,
iocTime,
pulseId,
globalTime,
KEYSPACE,
value
);
@@ -84,20 +92,20 @@ public class DummyArchiverApplianceReader implements DataReader {
value[0] = i;
return new ChannelEvent(
channel,
time,
i,
time,
1,
iocTime,
pulseId,
globalTime,
KEYSPACE,
value,
shape
);
} else {
return new ChannelEvent(
channel,
time,
i,
time,
1,
iocTime,
pulseId,
globalTime,
KEYSPACE,
i
);
}

View File

@@ -14,6 +14,7 @@ import java.util.stream.Collectors;
import java.util.stream.LongStream;
import java.util.stream.Stream;
import org.apache.commons.lang3.ArrayUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -26,12 +27,14 @@ import ch.psi.daq.common.time.TimeUtils;
import ch.psi.daq.domain.DataEvent;
import ch.psi.daq.domain.cassandra.ChannelConfiguration;
import ch.psi.daq.domain.cassandra.ChannelEvent;
import ch.psi.daq.domain.cassandra.FieldNames;
import ch.psi.daq.domain.cassandra.MetaPulseId;
import ch.psi.daq.domain.cassandra.query.PulseIdRangeQuery;
import ch.psi.daq.domain.cassandra.query.TimeRangeQuery;
import ch.psi.daq.domain.cassandra.querying.ChannelEventQuery;
import ch.psi.daq.domain.cassandra.querying.MetaChannelEvent;
import ch.psi.daq.domain.cassandra.querying.EventQuery;
import ch.psi.daq.domain.cassandra.utils.TablePropertiesUtils;
import ch.psi.daq.domain.reader.Backend;
public class DummyCassandraReader implements CassandraReader {
@@ -102,7 +105,7 @@ public class DummyCassandraReader implements CassandraReader {
@Override
public Stream<? extends DataEvent> getEventStream(String channel, long startPulseId, long endPulseId,
Ordering ordering, boolean aggregateValues, String... columns) {
return getDummyEventStream(channel, startPulseId, endPulseId);
return getDummyEventStream(channel, startPulseId, endPulseId, columns);
}
/**
@@ -111,7 +114,7 @@ public class DummyCassandraReader implements CassandraReader {
@Override
public Stream<? extends DataEvent> getEventStream(String channel, BigDecimal startTime, BigDecimal endTime,
Ordering ordering, boolean aggregateValues, String... columns) {
return getDummyEventStream(channel, TimeUtils.getMillis(startTime) / 10, TimeUtils.getMillis(endTime) / 10);
return getDummyEventStream(channel, TimeUtils.getMillis(startTime) / 10, TimeUtils.getMillis(endTime) / 10, columns);
}
/**
@@ -136,7 +139,7 @@ public class DummyCassandraReader implements CassandraReader {
@Override
public Stream<ChannelEvent> getEventStream(PulseIdRangeQuery query) {
Stream<ChannelEvent> dummyEventStream =
getDummyEventStream(query.getChannel(), query.getStartPulseId(), query.getEndPulseId())
getDummyEventStream(query.getChannel(), query.getStartPulseId(), query.getEndPulseId(), query.getEventColumns())
.map(ce -> {
return (ChannelEvent) ce;
});
@@ -150,7 +153,7 @@ public class DummyCassandraReader implements CassandraReader {
@Override
public Stream<ChannelEvent> getEventStream(TimeRangeQuery query) {
Stream<ChannelEvent> dummyEventStream =
getDummyEventStream(query.getChannel(), query.getStartMillis() / 10, query.getEndMillis() / 10)
getDummyEventStream(query.getChannel(), query.getStartMillis() / 10, query.getEndMillis() / 10, query.getEventColumns())
.map(ce -> {
return (ChannelEvent) ce;
});
@@ -158,20 +161,23 @@ public class DummyCassandraReader implements CassandraReader {
}
private Stream<? extends DataEvent> getDummyEventStream(String channel, long startIndex, long endIndex) {
String channelLower = channel.toLowerCase();
private Stream<? extends DataEvent> getDummyEventStream(String channelParam, long startIndex, long endIndex, String... columns) {
String channelLower = channelParam.toLowerCase();
String channel = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_CHANNEL)) ? channelParam : null;
Stream<? extends DataEvent> eventStream = LongStream.rangeClosed(startIndex, endIndex).mapToObj(i -> {
BigDecimal time = TimeUtils.getTimeFromMillis(i * 10, 0);
BigDecimal iocTime = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_IOC_TIME)) ? TimeUtils.getTimeFromMillis(i * 10, 0) : TablePropertiesUtils.DEFAULT_VALUE_DECIMAL;
BigDecimal globalTime = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_GLOBAL_TIME)) ? TimeUtils.getTimeFromMillis(i * 10, 0) : TablePropertiesUtils.DEFAULT_VALUE_DECIMAL;
long pulseId = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_PULSE_ID)) ? i : TablePropertiesUtils.DEFAULT_VALUE_BIGINT_PRIMITIVE;
if (channelLower.contains("waveform")) {
long[] value = random.longs(2048).toArray();
value[0] = i;
return new ChannelEvent(
channel,
time,
i,
time,
iocTime,
pulseId,
globalTime,
KEYSPACE,
value
);
@@ -184,9 +190,9 @@ public class DummyCassandraReader implements CassandraReader {
value[0] = i;
return new ChannelEvent(
channel,
time,
i,
time,
iocTime,
pulseId,
globalTime,
KEYSPACE,
value,
shape
@@ -194,9 +200,9 @@ public class DummyCassandraReader implements CassandraReader {
} else {
return new ChannelEvent(
channel,
time,
i,
time,
iocTime,
pulseId,
globalTime,
KEYSPACE,
i
);
@@ -206,8 +212,8 @@ public class DummyCassandraReader implements CassandraReader {
return eventStream;
}
private List<? extends DataEvent> getDummyEvents(String channel, long startIndex, long endIndex) {
return getDummyEventStream(channel, startIndex, endIndex).collect(Collectors.toList());
private List<? extends DataEvent> getDummyEvents(String channel, long startIndex, long endIndex, String...columns) {
return getDummyEventStream(channel, startIndex, endIndex, columns).collect(Collectors.toList());
}
/**
@@ -234,7 +240,7 @@ public class DummyCassandraReader implements CassandraReader {
@Override
public ChannelEvent getEvent(MetaChannelEvent queryInfo, String... columns) {
if (queryInfo.getPulseId() > 0) {
return (ChannelEvent) getDummyEvents(queryInfo.getChannel(), queryInfo.getPulseId(), queryInfo.getPulseId())
return (ChannelEvent) getDummyEvents(queryInfo.getChannel(), queryInfo.getPulseId(), queryInfo.getPulseId(), columns)
.get(0);
}
return (ChannelEvent) getDummyEvents(queryInfo.getChannel(), queryInfo.getGlobalMillis() / 10,

View File

@@ -1,5 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true">
<configuration>
<jmxConfigurator />
<appender name="consoleAppender" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<Pattern>.%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg %n