This commit is contained in:
Fabian Märki
2016-03-15 16:47:48 +01:00
parent b4235a7a66
commit 652f900e86
5 changed files with 116 additions and 10 deletions

View File

@ -12,16 +12,20 @@ repositories { jcenter() }
springBoot { // when using spring loaded turn on noverify springBoot { // when using spring loaded turn on noverify
noverify = true } noverify = true }
ext {
applicationDefaultJvmArgs = [ applicationDefaultJvmArgs = [
"-Dfile.encoding=UTF-8", "-Dfile.encoding=UTF-8",
// if you need to debug java agents: // if you need to debug java agents:
"-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5006" "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5006"
] ]
}
dependencies { dependencies {
compile (project(':ch.psi.daq.query')) compile (project(':ch.psi.daq.query'))
compile 'org.hibernate:hibernate-validator:5.2.0.Final'
compile libraries.logback_classic
compile 'org.hibernate:hibernate-validator:5.2.0.Final'
compile(libraries.spring_boot_starter_web) { compile(libraries.spring_boot_starter_web) {
exclude group: 'org.slf4j', module: 'log4j-over-slf4j' exclude group: 'org.slf4j', module: 'log4j-over-slf4j'
} }

View File

@ -0,0 +1,64 @@
<!--
In order to disable debug.log, comment-out the ASYNCDEBUGLOG
appender reference in the root level section below.
-->
<configuration scan="true">
<jmxConfigurator />
<!-- Delay JVM shutdown slightly so the async appender can drain its queue. -->
<shutdownHook class="ch.qos.logback.core.hook.DelayingShutdownHook"/>
<!-- DEBUGLOG rolling file appender to debug.log (all levels) -->
<appender name="DEBUGLOG" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${daq.logdir}/debug.log</file>
<!-- Keep up to 20 zipped rollovers: debug.log.1.zip ... debug.log.20.zip -->
<rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
<fileNamePattern>${daq.logdir}/debug.log.%i.zip</fileNamePattern>
<minIndex>1</minIndex>
<maxIndex>20</maxIndex>
</rollingPolicy>
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<maxFileSize>20MB</maxFileSize>
</triggeringPolicy>
<encoder>
<pattern>%-5level [%thread] %date{ISO8601} %F:%L - %msg%n</pattern>
</encoder>
</appender>
<!-- ASYNCDEBUGLOG asynchronous wrapper around DEBUGLOG (all levels);
     discardingThreshold=0 tells logback not to drop events when the queue fills -->
<appender name="ASYNCDEBUGLOG" class="ch.qos.logback.classic.AsyncAppender">
<queueSize>1024</queueSize>
<discardingThreshold>0</discardingThreshold>
<includeCallerData>true</includeCallerData>
<appender-ref ref="DEBUGLOG" />
</appender>
<!-- STDOUT console appender to stdout (INFO level and above) -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>INFO</level>
</filter>
<encoder>
<pattern>%-5level %date{ISO8601} %msg%n</pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="STDOUT" />
<!-- Uncomment next line to enable debug.log -->
<!-- <appender-ref ref="ASYNCDEBUGLOG" /> -->
</root>
<!-- DAQ packages log at DEBUG; their output only reaches a file once
     ASYNCDEBUGLOG is enabled above (STDOUT filters below INFO). -->
<logger name="ch.psi.bsread" level="DEBUG"/>
<logger name="ch.psi.data" level="DEBUG"/>
<logger name="ch.psi.daq.archiverappliance" level="DEBUG"/>
<logger name="ch.psi.daq.cassandra" level="DEBUG"/>
<logger name="ch.psi.daq.common" level="DEBUG"/>
<logger name="ch.psi.daq.dispatcher" level="DEBUG"/>
<logger name="ch.psi.daq.domain" level="DEBUG"/>
<logger name="ch.psi.daq.processing" level="DEBUG"/>
<logger name="ch.psi.daq.query" level="DEBUG"/>
<logger name="ch.psi.daq.cassandralocal" level="DEBUG"/>
<logger name="ch.psi.daq.daqlocal" level="DEBUG"/>
</configuration>

View File

@ -1,6 +1,6 @@
# defines the fields that are included in the response # defines the fields that are included in the response
# if no fields have been specified by the user # if no fields have been specified by the user
queryrest.default.response.fields=channel,pulseId,globalSeconds,shape,eventCount,value queryrest.default.response.fields=channel,pulseId,globalSeconds,iocSeconds,shape,eventCount,value
# aggregation which are included in the response by default if aggregation is enabled for a given query # aggregation which are included in the response by default if aggregation is enabled for a given query
queryrest.default.response.aggregations=min,mean,max queryrest.default.response.aggregations=min,mean,max

View File

@ -338,6 +338,44 @@ public class QueryRestControllerJsonTest extends AbstractDaqRestTest {
.andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].iocMillis").value(1010)); .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].iocMillis").value(1010));
} }
@Test
public void testPulseRangeQuery_Fields() throws Exception {
    // Query the pulse-id range [100, 199] for the test channels, restricting
    // the response to the pulseId and eventCount fields and binning into 2 bins
    // (expected bin boundaries at pulse 100 and 150, 50 events each).
    DAQQuery request = new DAQQuery(
            new RequestRangePulseId(
                    100,
                    199),
            TEST_CHANNEL_NAMES);
    request.addField(QueryField.pulseId);
    request.addField(QueryField.eventCount);
    request.setNrOfBins(2);

    String content = mapper.writeValueAsString(request);
    // NOTE: removed leftover System.out.println(content) debug statement;
    // MockMvcResultHandlers.print() below already dumps the full exchange.

    this.mockMvc
            .perform(MockMvcRequestBuilders
                    .post(QueryRestController.QUERY)
                    .contentType(MediaType.APPLICATION_JSON)
                    .content(content))
            .andDo(MockMvcResultHandlers.print())
            .andExpect(MockMvcResultMatchers.status().isOk())
            // Response is one array element per channel, each with binned data.
            .andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].channel.name").value(TEST_CHANNEL_01))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].pulseId").value(100))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[0].eventCount").value(50))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].pulseId").value(150))
            .andExpect(MockMvcResultMatchers.jsonPath("$[0].data[1].eventCount").value(50))
            .andExpect(MockMvcResultMatchers.jsonPath("$[1]").exists())
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].channel.name").value(TEST_CHANNEL_02))
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].data").isArray())
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].pulseId").value(100))
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[0].eventCount").value(50))
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].pulseId").value(150))
            .andExpect(MockMvcResultMatchers.jsonPath("$[1].data[1].eventCount").value(50));
}
@Test @Test
public void testPulseRangeQueryBackends() throws Exception { public void testPulseRangeQueryBackends() throws Exception {
DAQQuery request = new DAQQuery( DAQQuery request = new DAQQuery(

View File

@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<configuration> <configuration scan="true">
<appender name="consoleAppender" class="ch.qos.logback.core.ConsoleAppender"> <appender name="consoleAppender" class="ch.qos.logback.core.ConsoleAppender">
<encoder> <encoder>
<Pattern>.%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg %n <Pattern>.%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg %n
@ -10,8 +10,8 @@
</filter> </filter>
</appender> </appender>
<logger name="ch.psi.daq.queryrest" additivity="false"> <logger name="ch.psi.daq.queryrest">
<level value="INFO" /> <level value="DEBUG" />
<appender-ref ref="consoleAppender" /> <appender-ref ref="consoleAppender" />
</logger> </logger>