build.gradle | 16
@@ -12,16 +12,20 @@ repositories { jcenter() }
 springBoot { // when using spring loaded turn on noverify
     noverify = true }

-applicationDefaultJvmArgs = [
-    "-Dfile.encoding=UTF-8",
-    // if you need to debug java agents:
-    "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5006"
-]
+ext {
+    applicationDefaultJvmArgs = [
+        "-Dfile.encoding=UTF-8",
+        // if you need to debug java agents:
+        "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5006"
+    ]
+}

 dependencies {
     compile (project(':ch.psi.daq.query'))
-    compile 'org.hibernate:hibernate-validator:5.2.0.Final'

     compile libraries.logback_classic

+    compile 'org.hibernate:hibernate-validator:5.2.0.Final'
     compile(libraries.spring_boot_starter_web) {
         exclude group: 'org.slf4j', module: 'log4j-over-slf4j'
     }
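A quick way to confirm that a running JVM actually received these arguments is the standard management API; a minimal, self-contained check (the class name is illustrative, not part of the change):

import java.lang.management.ManagementFactory;

// Prints the JVM's startup arguments; with the build above one would expect
// -Dfile.encoding=UTF-8 and the -agentlib:jdwp=... line to appear when active.
public class JvmArgsCheck {
    public static void main(String[] args) {
        ManagementFactory.getRuntimeMXBean().getInputArguments()
                .forEach(System.out::println);
    }
}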
QueryRestApplication.java
@@ -1,7 +1,9 @@
 package ch.psi.daq.queryrest;

 import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.autoconfigure.data.cassandra.CassandraDataAutoConfiguration;
 import org.springframework.boot.builder.SpringApplicationBuilder;
 import org.springframework.boot.context.web.SpringBootServletInitializer;

@@ -10,6 +12,9 @@ import org.springframework.boot.context.web.SpringBootServletInitializer;
  * wires all the @RestController annotated classes.
  */
 @SpringBootApplication
+@EnableAutoConfiguration(exclude={
+    CassandraDataAutoConfiguration.class
+})
 public class QueryRestApplication extends SpringBootServletInitializer {

     public static void main(final String[] args) {
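Note that @SpringBootApplication is itself meta-annotated with @EnableAutoConfiguration, so in Spring Boot releases of this era the same exclusion can be expressed through the composed annotation's exclude attribute; a sketch of the equivalent form:

import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.cassandra.CassandraDataAutoConfiguration;

// Equivalent sketch: the exclude attribute of @SpringBootApplication forwards
// to @EnableAutoConfiguration, so a second annotation is not strictly needed.
@SpringBootApplication(exclude = CassandraDataAutoConfiguration.class)
public class QueryRestApplicationSketch {
}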
src/main/resources/logback-server.xml | 65 (new file)
@@ -0,0 +1,65 @@
+<!--
+    In order to disable debug.log, comment out the ASYNCDEBUGLOG
+    appender reference in the root level section below.
+-->
+
+<configuration scan="true">
+    <jmxConfigurator />
+
+    <shutdownHook class="ch.qos.logback.core.hook.DelayingShutdownHook"/>
+
+    <!-- DEBUGLOG rolling file appender to debug.log (all levels) -->
+    <appender name="DEBUGLOG" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <file>${daq.logdir}/debug.log</file>
+        <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+            <fileNamePattern>${daq.logdir}/debug.log.%i.zip</fileNamePattern>
+            <minIndex>1</minIndex>
+            <maxIndex>20</maxIndex>
+        </rollingPolicy>
+        <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+            <maxFileSize>20MB</maxFileSize>
+        </triggeringPolicy>
+        <encoder>
+            <pattern>%-5level [%thread] %date{ISO8601} %F:%L - %msg%n</pattern>
+        </encoder>
+    </appender>
+
+    <!-- ASYNCDEBUGLOG asynchronous appender to debug.log (all levels) -->
+    <appender name="ASYNCDEBUGLOG" class="ch.qos.logback.classic.AsyncAppender">
+        <queueSize>1024</queueSize>
+        <discardingThreshold>0</discardingThreshold>
+        <includeCallerData>true</includeCallerData>
+        <appender-ref ref="DEBUGLOG" />
+    </appender>
+
+    <!-- STDOUT console appender to stdout (INFO level) -->
+    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+            <level>INFO</level>
+        </filter>
+        <encoder>
+            <pattern>%-5level %date{ISO8601} %msg%n</pattern>
+        </encoder>
+    </appender>
+
+    <root level="INFO">
+        <appender-ref ref="STDOUT" />
+        <!-- Uncomment next line to enable debug.log -->
+        <!-- <appender-ref ref="ASYNCDEBUGLOG" /> -->
+    </root>
+
+    <logger name="ch.psi.bsread" level="DEBUG"/>
+    <logger name="ch.psi.data" level="DEBUG"/>
+    <logger name="ch.psi.daq.archiverappliance" level="DEBUG"/>
+    <logger name="ch.psi.daq.cassandra" level="DEBUG"/>
+    <logger name="ch.psi.daq.common" level="DEBUG"/>
+    <logger name="ch.psi.daq.dispatcher" level="DEBUG"/>
+    <logger name="ch.psi.daq.domain" level="DEBUG"/>
+    <logger name="ch.psi.daq.processing" level="DEBUG"/>
+    <logger name="ch.psi.daq.query" level="DEBUG"/>
+    <logger name="ch.psi.daq.cassandralocal" level="DEBUG"/>
+    <logger name="ch.psi.daq.daqlocal" level="DEBUG"/>
+</configuration>
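The appender paths rely on the daq.logdir system property being resolved when logback loads this configuration, so the property must exist before the first logger is created. A minimal launcher sketch (the launcher class and the path are assumptions, not part of the change):

// Hypothetical launcher sketch: set daq.logdir before any logging happens.
public class ServerLauncher {
    public static void main(String[] args) {
        System.setProperty("daq.logdir", "/var/log/daq"); // assumed path
        ch.psi.daq.queryrest.QueryRestApplication.main(args);
    }
}

In practice the property is more likely passed on the JVM command line as -Ddaq.logdir=... instead of being set programmatically.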
@@ -1,6 +1,6 @@
 # defines the fields that are included in the response
 # if no fields have been specified by the user
-queryrest.default.response.fields=channel,pulseId,globalSeconds,shape,eventCount,value
+queryrest.default.response.fields=channel,pulseId,globalSeconds,iocSeconds,shape,eventCount,value

 # aggregations which are included in the response by default if aggregation is enabled for a given query
 queryrest.default.response.aggregations=min,mean,max
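These defaults apply only when a request names no fields of its own; an explicit field list, as in the test added below, overrides them. A sketch, where QueryField.iocSeconds is an assumed enum constant mirroring the new iocSeconds default entry:

// Sketch: an explicit field selection overrides queryrest.default.response.fields.
DAQQuery query = new DAQQuery(new RequestRangePulseId(100, 199), "Channel_01"); // channel name illustrative
query.addField(QueryField.pulseId);
query.addField(QueryField.iocSeconds); // assumed constant matching the new default entry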
DaqWebMvcConfig.java
@@ -59,7 +59,7 @@ public class DaqWebMvcConfig extends WebMvcConfigurationSupport {

     @Bean
     @Lazy
-    public CassandraTestAdmin cassandraTestAdmin() {
+    public CassandraTestAdmin testAdmin() {
         return new CassandraTestAdminImpl();
     }
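The method rename is not cosmetic: a @Bean method registers its bean under the method name, and the @Resource fields renamed in the two test classes below are resolved by field name first, so both sides must agree. A sketch of the pairing:

// Bean name defaults to the @Bean method name ("testAdmin"); @Resource injects
// by name before falling back to type, so field and method names must match.
@Bean
@Lazy
public CassandraTestAdmin testAdmin() {
    return new CassandraTestAdminImpl();
}

@Resource
private CassandraTestAdmin testAdmin; // matched against the bean named "testAdmin"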
QueryRestControllerCsvTest.java
@@ -50,7 +50,7 @@ import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
 public class QueryRestControllerCsvTest extends AbstractDaqRestTest {

     @Resource
-    private CassandraTestAdmin cassandraTestAdmin;
+    private CassandraTestAdmin testAdmin;

     @Resource
     private CassandraDataGen dataGen;
QueryRestControllerJsonTest.java
@@ -37,7 +37,7 @@ import ch.psi.daq.test.queryrest.AbstractDaqRestTest;
 public class QueryRestControllerJsonTest extends AbstractDaqRestTest {

     @Resource
-    private CassandraTestAdmin cassandraTestAdmin;
+    private CassandraTestAdmin testAdmin;

     @Resource
     private CassandraDataGen dataGen;
@@ -337,6 +337,44 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
                         TestTimeUtils.getTimeStr(1, 10000000)))
                 .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].iocMillis").value(1010));
     }

+    @Test
+    public void testPulseRangeQuery_Fields() throws Exception {
+        DAQQuery request = new DAQQuery(
+                new RequestRangePulseId(
+                        100,
+                        199),
+                TEST_CHANNEL_NAMES);
+        request.addField(QueryField.pulseId);
+        request.addField(QueryField.eventCount);
+        request.setNrOfBins(2);
+
+        String content = mapper.writeValueAsString(request);
+        System.out.println(content);
+
+        this.mockMvc
+                .perform(MockMvcRequestBuilders
+                        .post(QueryRestController.QUERY)
+                        .contentType(MediaType.APPLICATION_JSON)
+                        .content(content))
+                .andDo(MockMvcResultHandlers.print())
+                .andExpect(MockMvcResultMatchers.status().isOk())
+                .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
+                .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
+                .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
+                .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
+                .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].eventCount").value(50))
+                .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(150))
+                .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].eventCount").value(50))
+                .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
+                .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
+                .andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
+                .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(100))
+                .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].eventCount").value(50))
+                .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(150))
+                .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].eventCount").value(50));
+    }
+
     @Test
     public void testPulseRangeQueryBackends() throws Exception {
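The eventCount assertions follow from the binning arithmetic: the closed pulse range 100-199 holds 100 events, and setNrOfBins(2) splits it into two bins of 50 whose first pulses are 100 and 150. A worked check (assumes equal-width binning, which is what the assertions imply):

// Worked check of the assertions above.
long start = 100, end = 199, nrOfBins = 2;
long eventsPerBin = (end - start + 1) / nrOfBins; // 100 / 2 = 50
long firstBin = start;                            // pulseId 100
long secondBin = start + eventsPerBin;            // pulseId 150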
DummyArchiverApplianceReader.java
@@ -8,18 +8,23 @@ import java.util.regex.Pattern;
 import java.util.stream.LongStream;
 import java.util.stream.Stream;

+import org.apache.commons.lang3.ArrayUtils;
+
 import com.google.common.collect.Lists;

 import ch.psi.daq.common.ordering.Ordering;
 import ch.psi.daq.common.time.TimeUtils;
 import ch.psi.daq.domain.DataEvent;
 import ch.psi.daq.domain.cassandra.ChannelEvent;
+import ch.psi.daq.domain.cassandra.FieldNames;
+import ch.psi.daq.domain.cassandra.utils.TablePropertiesUtils;
 import ch.psi.daq.domain.reader.Backend;
 import ch.psi.daq.domain.reader.DataReader;

 public class DummyArchiverApplianceReader implements DataReader {
     public static final String ARCHIVER_TEST_CHANNEL = "ArchiverTestChannel_";

+    private static final int KEYSPACE = 1;
     public static final String TEST_CHANNEL_1 = "ArchiverChannel_1";
     public static final String TEST_CHANNEL_2 = "ArchiverChannel_2";
     private List<String> channels = Lists.newArrayList(TEST_CHANNEL_1, TEST_CHANNEL_2);
@@ -49,30 +54,33 @@ public class DummyArchiverApplianceReader implements DataReader {
     @Override
     public Stream<? extends DataEvent> getEventStream(String channel, long startPulseId, long endPulseId,
             Ordering ordering, boolean aggregateValues, String... columns) {
-        return getElements(channel, startPulseId, endPulseId);
+        return getDummyEventStream(channel, startPulseId, endPulseId, columns);
     }

     @Override
     public Stream<? extends DataEvent> getEventStream(String channel, BigDecimal startTime, BigDecimal endTime,
             Ordering ordering, boolean aggregateValues, String... columns) {
-        return getElements(channel, TimeUtils.getMillis(startTime) / 10, TimeUtils.getMillis(endTime) / 10);
+        return getDummyEventStream(channel, TimeUtils.getMillis(startTime) / 10, TimeUtils.getMillis(endTime) / 10);
     }

-    protected Stream<? extends DataEvent> getElements(String channel, long start, long end) {
-        String channelLower = channel.toLowerCase();
+    private Stream<? extends DataEvent> getDummyEventStream(String channelParam, long startIndex, long endIndex, String... columns) {
+        String channelLower = channelParam.toLowerCase();
+        String channel = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_CHANNEL)) ? channelParam : null;

-        Stream<? extends DataEvent> eventStream = LongStream.rangeClosed(start, end).mapToObj(i -> {
-            BigDecimal time = TimeUtils.getTimeFromMillis(i * 10, 0);
+        Stream<? extends DataEvent> eventStream = LongStream.rangeClosed(startIndex, endIndex).mapToObj(i -> {
+            BigDecimal iocTime = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_IOC_TIME)) ? TimeUtils.getTimeFromMillis(i * 10, 0) : TablePropertiesUtils.DEFAULT_VALUE_DECIMAL;
+            BigDecimal globalTime = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_GLOBAL_TIME)) ? TimeUtils.getTimeFromMillis(i * 10, 0) : TablePropertiesUtils.DEFAULT_VALUE_DECIMAL;
+            long pulseId = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_PULSE_ID)) ? i : TablePropertiesUtils.DEFAULT_VALUE_BIGINT_PRIMITIVE;

             if (channelLower.contains("waveform")) {
                 long[] value = random.longs(2048).toArray();
                 value[0] = i;
                 return new ChannelEvent(
                         channel,
-                        time,
-                        i,
-                        time,
-                        1,
+                        iocTime,
+                        pulseId,
+                        globalTime,
+                        KEYSPACE,
                         value
                 );
@@ -84,20 +92,20 @@ public class DummyArchiverApplianceReader implements DataReader {
                 value[0] = i;
                 return new ChannelEvent(
                         channel,
-                        time,
-                        i,
-                        time,
-                        1,
+                        iocTime,
+                        pulseId,
+                        globalTime,
+                        KEYSPACE,
                         value,
                         shape
                 );
             } else {
                 return new ChannelEvent(
                         channel,
-                        time,
-                        i,
-                        time,
-                        1,
+                        iocTime,
+                        pulseId,
+                        globalTime,
+                        KEYSPACE,
                         i
                 );
             }
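The same "columns == null || columns.length == 0 || ArrayUtils.contains(...)" guard is repeated for every field here and again in DummyCassandraReader below; a possible extraction (a sketch, not part of the diff):

// Possible helper (sketch): a field is included when no column selection was
// made at all, or when the field is named explicitly.
private static boolean includeField(String[] columns, String fieldName) {
    return columns == null || columns.length == 0 || ArrayUtils.contains(columns, fieldName);
}

// usage: BigDecimal iocTime = includeField(columns, FieldNames.FIELD_IOC_TIME)
//         ? TimeUtils.getTimeFromMillis(i * 10, 0)
//         : TablePropertiesUtils.DEFAULT_VALUE_DECIMAL;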
DummyCassandraReader.java
@@ -14,6 +14,7 @@ import java.util.stream.Collectors;
 import java.util.stream.LongStream;
 import java.util.stream.Stream;

+import org.apache.commons.lang3.ArrayUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -26,12 +27,14 @@ import ch.psi.daq.common.time.TimeUtils;
 import ch.psi.daq.domain.DataEvent;
 import ch.psi.daq.domain.cassandra.ChannelConfiguration;
 import ch.psi.daq.domain.cassandra.ChannelEvent;
+import ch.psi.daq.domain.cassandra.FieldNames;
 import ch.psi.daq.domain.cassandra.MetaPulseId;
 import ch.psi.daq.domain.cassandra.query.PulseIdRangeQuery;
 import ch.psi.daq.domain.cassandra.query.TimeRangeQuery;
 import ch.psi.daq.domain.cassandra.querying.ChannelEventQuery;
 import ch.psi.daq.domain.cassandra.querying.MetaChannelEvent;
 import ch.psi.daq.domain.cassandra.querying.EventQuery;
+import ch.psi.daq.domain.cassandra.utils.TablePropertiesUtils;
 import ch.psi.daq.domain.reader.Backend;

 public class DummyCassandraReader implements CassandraReader {
@@ -102,7 +105,7 @@ public class DummyCassandraReader implements CassandraReader {
     @Override
     public Stream<? extends DataEvent> getEventStream(String channel, long startPulseId, long endPulseId,
             Ordering ordering, boolean aggregateValues, String... columns) {
-        return getDummyEventStream(channel, startPulseId, endPulseId);
+        return getDummyEventStream(channel, startPulseId, endPulseId, columns);
     }

     /**
@@ -111,7 +114,7 @@ public class DummyCassandraReader implements CassandraReader {
     @Override
     public Stream<? extends DataEvent> getEventStream(String channel, BigDecimal startTime, BigDecimal endTime,
             Ordering ordering, boolean aggregateValues, String... columns) {
-        return getDummyEventStream(channel, TimeUtils.getMillis(startTime) / 10, TimeUtils.getMillis(endTime) / 10);
+        return getDummyEventStream(channel, TimeUtils.getMillis(startTime) / 10, TimeUtils.getMillis(endTime) / 10, columns);
     }

     /**
@@ -136,7 +139,7 @@ public class DummyCassandraReader implements CassandraReader {
     @Override
     public Stream<ChannelEvent> getEventStream(PulseIdRangeQuery query) {
         Stream<ChannelEvent> dummyEventStream =
-                getDummyEventStream(query.getChannel(), query.getStartPulseId(), query.getEndPulseId())
+                getDummyEventStream(query.getChannel(), query.getStartPulseId(), query.getEndPulseId(), query.getEventColumns())
                         .map(ce -> {
                             return (ChannelEvent) ce;
                         });
@@ -150,7 +153,7 @@ public class DummyCassandraReader implements CassandraReader {
     @Override
     public Stream<ChannelEvent> getEventStream(TimeRangeQuery query) {
         Stream<ChannelEvent> dummyEventStream =
-                getDummyEventStream(query.getChannel(), query.getStartMillis() / 10, query.getEndMillis() / 10)
+                getDummyEventStream(query.getChannel(), query.getStartMillis() / 10, query.getEndMillis() / 10, query.getEventColumns())
                         .map(ce -> {
                             return (ChannelEvent) ce;
                         });
@@ -158,20 +161,23 @@ public class DummyCassandraReader implements CassandraReader {
     }

-    private Stream<? extends DataEvent> getDummyEventStream(String channel, long startIndex, long endIndex) {
-        String channelLower = channel.toLowerCase();
+    private Stream<? extends DataEvent> getDummyEventStream(String channelParam, long startIndex, long endIndex, String... columns) {
+        String channelLower = channelParam.toLowerCase();
+        String channel = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_CHANNEL)) ? channelParam : null;

         Stream<? extends DataEvent> eventStream = LongStream.rangeClosed(startIndex, endIndex).mapToObj(i -> {
-            BigDecimal time = TimeUtils.getTimeFromMillis(i * 10, 0);
+            BigDecimal iocTime = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_IOC_TIME)) ? TimeUtils.getTimeFromMillis(i * 10, 0) : TablePropertiesUtils.DEFAULT_VALUE_DECIMAL;
+            BigDecimal globalTime = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_GLOBAL_TIME)) ? TimeUtils.getTimeFromMillis(i * 10, 0) : TablePropertiesUtils.DEFAULT_VALUE_DECIMAL;
+            long pulseId = (columns == null || columns.length == 0 || ArrayUtils.contains(columns, FieldNames.FIELD_PULSE_ID)) ? i : TablePropertiesUtils.DEFAULT_VALUE_BIGINT_PRIMITIVE;

             if (channelLower.contains("waveform")) {
                 long[] value = random.longs(2048).toArray();
                 value[0] = i;
                 return new ChannelEvent(
                         channel,
-                        time,
-                        i,
-                        time,
+                        iocTime,
+                        pulseId,
+                        globalTime,
                         KEYSPACE,
                         value
                 );
@@ -184,9 +190,9 @@ public class DummyCassandraReader implements CassandraReader {
                 value[0] = i;
                 return new ChannelEvent(
                         channel,
-                        time,
-                        i,
-                        time,
+                        iocTime,
+                        pulseId,
+                        globalTime,
                         KEYSPACE,
                         value,
                         shape
@@ -194,9 +200,9 @@ public class DummyCassandraReader implements CassandraReader {
             } else {
                 return new ChannelEvent(
                         channel,
-                        time,
-                        i,
-                        time,
+                        iocTime,
+                        pulseId,
+                        globalTime,
                         KEYSPACE,
                         i
                 );
@@ -206,8 +212,8 @@ public class DummyCassandraReader implements CassandraReader {
         return eventStream;
     }

-    private List<? extends DataEvent> getDummyEvents(String channel, long startIndex, long endIndex) {
-        return getDummyEventStream(channel, startIndex, endIndex).collect(Collectors.toList());
+    private List<? extends DataEvent> getDummyEvents(String channel, long startIndex, long endIndex, String... columns) {
+        return getDummyEventStream(channel, startIndex, endIndex, columns).collect(Collectors.toList());
     }

     /**
@@ -234,7 +240,7 @@ public class DummyCassandraReader implements CassandraReader {
     @Override
     public ChannelEvent getEvent(MetaChannelEvent queryInfo, String... columns) {
         if (queryInfo.getPulseId() > 0) {
-            return (ChannelEvent) getDummyEvents(queryInfo.getChannel(), queryInfo.getPulseId(), queryInfo.getPulseId())
+            return (ChannelEvent) getDummyEvents(queryInfo.getChannel(), queryInfo.getPulseId(), queryInfo.getPulseId(), columns)
                     .get(0);
         }
         return (ChannelEvent) getDummyEvents(queryInfo.getChannel(), queryInfo.getGlobalMillis() / 10,
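The recurring "/ 10" is explained by the generator itself: the dummy streams emit one event per 10 ms (timestamp = i * 10 for index i), so dividing a millisecond value by 10 recovers the event index. A worked sketch of that round trip (values illustrative):

// Sketch: millis-to-index mapping implied by the i * 10 timestamps above.
long millis = 1010;
long index = millis / 10;                                   // 101
BigDecimal ts = TimeUtils.getTimeFromMillis(index * 10, 0); // back to 1010 ms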
@@ -1,5 +1,7 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <configuration>
+    <jmxConfigurator />
+
     <appender name="consoleAppender" class="ch.qos.logback.core.ConsoleAppender">
         <encoder>
             <Pattern>.%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg %n
@@ -10,8 +12,8 @@
         </filter>
     </appender>

-    <logger name="ch.psi.daq.queryrest" additivity="false">
-        <level value="INFO" />
+    <logger name="ch.psi.daq.queryrest">
+        <level value="DEBUG" />
         <appender-ref ref="consoleAppender" />
     </logger>