config and scripts to work on xbl-daq-24

This commit is contained in:
Data Backend account
2022-02-11 13:51:30 +01:00
parent 8c5035e292
commit 1ca20a423d
4 changed files with 103 additions and 0 deletions
+22
View File
@@ -0,0 +1,22 @@
detsize 512 1024
#T65 test detector for SLS2
hostname beb003+beb043+
#top
0:rx_tcpport 1954
0:udp_dstport 50000
0:udp_dstport2 50001
0:udp_srcip 10.254.0.33
#bottom
1:rx_tcpport 1955
1:udp_dstport 50002
1:udp_dstport2 50003
1:udp_srcip 10.254.0.34
udp_dstip 10.254.0.1
udp_dstmac 24:be:05:bd:6c:52
tengiga 1
+10
View File
@@ -0,0 +1,10 @@
#!/bin/bash
# List systemd service units matching an extended-regex pattern and print the
# ones currently running as a "Unit State Status" table.
# Usage: $0 [-g EXTENDED_REGEX]   (default pattern: std|streamvis)
GREP="std|streamvis"
while getopts g: flag
do
case "${flag}" in
g) GREP=${OPTARG};;
*) echo "Usage: $0 [-g pattern]" >&2; exit 2;;
esac
done
# Quote the pattern and guard with '--' so a pattern starting with '-' or
# containing spaces/globs is passed to grep verbatim (SC2086).
systemctl list-units --type service --all \
  | grep -E -- "${GREP}" \
  | awk 'BEGIN{print "Unit State Status"};$4 ~ /^running$/{print $1,$2,$4}' \
  | column -t
+30
View File
@@ -0,0 +1,30 @@
#!/bin/bash
# Configure an Eiger detector via sls_detector_put and start an acquisition.
# usage ./start_eiger_detector.sh Eiger 1
if [ $# -lt 1 ]
then
    echo "Usage : $0 DETECTOR_NAME <number_of_cycles>"
    echo "        DETECTOR_NAME: Eiger..."
    echo "        number_of_cycles : optional, default 1"
    # Exit non-zero so callers can detect the usage error (bare 'exit' returns 0).
    exit 1
fi
# Optional path prefix for a locally built slsDetectorPackage; empty uses $PATH.
SLS_DET_PACKAGE_PATH=''
#SLS_DET_PACKAGE_PATH='/home/dbe/git/sf_daq_buffer_eiger/slsDetectorPackage/build/bin/'
# SLS_DET_PACKAGE_PATH='/home/hax_l/sf_daq_buffer/slsDetectorPackage/build/bin/'
# DETECTOR=$1
n_cycles=1
# '-eq' is the POSIX integer comparison; '==' inside '[ ]' is not portable.
if [ $# -eq 2 ]
then
    n_cycles=$2
fi
${SLS_DET_PACKAGE_PATH}sls_detector_put timing auto
${SLS_DET_PACKAGE_PATH}sls_detector_put triggers "${n_cycles}"
${SLS_DET_PACKAGE_PATH}sls_detector_put exptime 0.000005
${SLS_DET_PACKAGE_PATH}sls_detector_put frames 60
${SLS_DET_PACKAGE_PATH}sls_detector_put dr 16
#sls_detector_put ${D}-clearbit to 0x5d 0 # normal mode, not highG0
${SLS_DET_PACKAGE_PATH}sls_detector_put acquire
echo "Now start trigger"
+41
View File
@@ -0,0 +1,41 @@
from datetime import datetime
import requests
import time
import json
# writer agent endpoint
URL = "http://127.0.0.1:5000"
# type of write request
SYNC = "/write_sync"
# details of request
n_images = 3
n_acquisitions = 1000
headers = {'Content-type': 'application/json'}
print("Performing sync acquisitions...")
for i in range(0, n_acquisitions):
    output_file = '/home/dbe/git/sf_daq_buffer/eiger/xbl-daq-24/output_folder/eiger_sync_%s_%s.h5' % (datetime.now().strftime("%H%M%S"), i)
    #output_file='/tmp/output.h5'
    data = {'sources': 'BEC.EG01V01', 'n_images': n_images, 'output_file': output_file}
    print("REQUEST: ", i)
    print("DATA: ", data)
    # Use the configured endpoint constants instead of a duplicated hard-coded
    # URL, so changing URL/SYNC above actually takes effect.
    r = requests.post(url=URL + SYNC, json=data, headers=headers)
    print("RESPONSE FROM REQUEST: ", r.text)
    data = None
    time.sleep(0.2)
    # time.sleep(3)
# print("Performing async acquisitions...")
# for i in range(0,n_acquisitions):
#     output_file ='/home/hax_l/tests/eiger_async_%s_%s.h5' % (datetime.now().strftime("%H%M%S"), i)
#     data = {'sources':'eiger', 'n_images':n_images, 'output_file':output_file}
#     print("REQUEST: ", i)
#     print("DATA: ", data)
#     r = requests.post(url = "http://127.0.0.1:5000/write_async", json=data, headers=headers)
#     data = None
#     time.sleep(1)
# #//TODO print("Testing kill aquisitions...")