From 1790544a357e08315b8be0cc7a7bb89c04cc1e15 Mon Sep 17 00:00:00 2001 From: babic_a Date: Tue, 29 Sep 2020 09:27:26 +0200 Subject: [PATCH 01/61] Remove live stream of pulse_ids --- sf-stream/src/ZmqLiveSender.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sf-stream/src/ZmqLiveSender.cpp b/sf-stream/src/ZmqLiveSender.cpp index 897c6c2..cf1b1aa 100644 --- a/sf-stream/src/ZmqLiveSender.cpp +++ b/sf-stream/src/ZmqLiveSender.cpp @@ -60,7 +60,7 @@ ZmqLiveSender::ZmqLiveSender( } } - { + if (false) { socket_pulse_ = zmq_socket(ctx, ZMQ_PUB); if (zmq_bind(socket_pulse_, config.pulse_address.c_str()) != 0) { @@ -121,9 +121,9 @@ void ZmqLiveSender::send(const ModuleFrameBuffer *meta, const char *data) } } - if(zmq_send(socket_pulse_, &pulse_id, sizeof(pulse_id), 0) == -1) { - throw runtime_error(zmq_strerror(errno)); - } +// if(zmq_send(socket_pulse_, &pulse_id, sizeof(pulse_id), 0) == -1) { +// throw runtime_error(zmq_strerror(errno)); +// } // TODO: Here we need to send to streamvis and live analysis metadata(probably need to operate still on them) and data(not every frame) From c494e7a9aba2c730032b9f94ee9ace31d800a199 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Tue, 29 Sep 2020 09:39:43 +0200 Subject: [PATCH 02/61] Local changes --- CMakeLists.txt | 2 +- scripts/JF02-buffer-worker.sh | 5 +++-- scripts/JF02-stream.sh | 4 ++-- scripts/JF06-buffer-worker.sh | 3 ++- scripts/JF06-stream.service | 2 +- scripts/JF06-stream.sh | 3 ++- scripts/JF06_4M-buffer-worker.sh | 4 ++-- sf-stream/include/stream_config.hpp | 2 +- 8 files changed, 14 insertions(+), 11 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index d480f91..9bb6af6 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -33,4 +33,4 @@ add_subdirectory("sf-buffer") add_subdirectory("sf-stream") add_subdirectory("sf-writer") #add_subdirectory("jf-live-writer") -#add_subdirectory("jf-live-daq") \ No newline at end of file +#add_subdirectory("jf-live-daq") diff --git 
a/scripts/JF02-buffer-worker.sh b/scripts/JF02-buffer-worker.sh index 226db66..e8f0d2b 100644 --- a/scripts/JF02-buffer-worker.sh +++ b/scripts/JF02-buffer-worker.sh @@ -9,9 +9,10 @@ fi M=$1 # Add ourselves to the user cpuset. -# echo $$ > /sys/fs/cgroup/cpuset/user/tasks +echo $$ > /sys/fs/cgroup/cpuset/user/tasks -coreAssociatedBuffer=(39 39 39 40 40 40 41 41 41) +#coreAssociatedBuffer=(25 25 26 26 27 27 28 28 29) +coreAssociatedBuffer=(1 2 2 3 3 4 4 5 5) initialUDPport=50020 port=$((${initialUDPport}+10#${M})) diff --git a/scripts/JF02-stream.sh b/scripts/JF02-stream.sh index f150b3e..38b113d 100644 --- a/scripts/JF02-stream.sh +++ b/scripts/JF02-stream.sh @@ -1,5 +1,5 @@ #!/bin/bash - -coreAssociated="17,18,19" +echo $$ > /sys/fs/cgroup/cpuset/user/tasks +coreAssociated="33,34,35" taskset -c ${coreAssociated} /usr/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF02.json diff --git a/scripts/JF06-buffer-worker.sh b/scripts/JF06-buffer-worker.sh index dd9c721..bac2bdb 100644 --- a/scripts/JF06-buffer-worker.sh +++ b/scripts/JF06-buffer-worker.sh @@ -11,7 +11,8 @@ M=$1 # Add ourselves to the user cpuset. 
# echo $$ > /sys/fs/cgroup/cpuset/user/tasks -coreAssociatedBuffer=(22 22 23 23 24 24 25 25 26 26 27 27 28 28 29 29 30 30 31 31 32 32 33 33 34 34 35 35 36 36 37 37) +#coreAssociatedBuffer=(22 22 23 23 24 24 25 25 26 26 27 27 28 28 29 29 30 30 31 31 32 32 33 33 34 34 35 35 36 36 37 37) +coreAssociatedBuffer=(6 6 7 7 8 8 9 9 10 10 22 22 23 23 24 24 25 25 26 26 27 27 28 28 29 29 30 30 31 31 32 32) initialUDPport=50060 port=$((${initialUDPport}+10#${M})) diff --git a/scripts/JF06-stream.service b/scripts/JF06-stream.service index 9f6d3b6..9c5b2c6 100644 --- a/scripts/JF06-stream.service +++ b/scripts/JF06-stream.service @@ -8,7 +8,7 @@ User=root ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF06-stream.sh TimeoutStartSec=10 Restart=on-failure -RestartSec=10 +RestartSec=1 [Install] WantedBy=multi-user.target diff --git a/scripts/JF06-stream.sh b/scripts/JF06-stream.sh index 6d39268..c778845 100644 --- a/scripts/JF06-stream.sh +++ b/scripts/JF06-stream.sh @@ -1,5 +1,6 @@ #!/bin/bash -coreAssociated="13,14,15,16" +coreAssociated="2,3,4,5" +#echo $$ > /sys/fs/cgroup/cpuset/user/tasks taskset -c ${coreAssociated} /usr/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF06.json diff --git a/scripts/JF06_4M-buffer-worker.sh b/scripts/JF06_4M-buffer-worker.sh index c67c0e5..c596d9d 100644 --- a/scripts/JF06_4M-buffer-worker.sh +++ b/scripts/JF06_4M-buffer-worker.sh @@ -2,7 +2,7 @@ if [ $# != 1 ] then - systemctl start JF06_4M-buffer-worker@{00..31} + systemctl start JF06_4M-buffer-worker@{00..07} exit fi @@ -15,6 +15,6 @@ coreAssociatedBuffer=(22 23 24 25 26 27 28 29) initialUDPport=50060 port=$((${initialUDPport}+10#${M})) -DETECTOR=JF06T08V01 +DETECTOR=JF06T08V02 taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} diff --git a/sf-stream/include/stream_config.hpp b/sf-stream/include/stream_config.hpp index ac28d86..745b9cd 100644 --- 
a/sf-stream/include/stream_config.hpp +++ b/sf-stream/include/stream_config.hpp @@ -1,7 +1,7 @@ namespace stream_config { // N of IO threads to receive data from modules. - const int STREAM_ZMQ_IO_THREADS = 2; + const int STREAM_ZMQ_IO_THREADS = 5; // How long should the RECV queue be. const size_t STREAM_RCVHWM = 100; // Size of buffer between the receiving and sending part. From 858707391e12881e1f03985e1a69d4ad9064309c Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Tue, 29 Sep 2020 09:50:38 +0200 Subject: [PATCH 03/61] New scripts --- scripts/convert_detector_data.sh | 167 ++++++++++++++++++++++++++++++ scripts/export_file.py | 2 +- scripts/make_crystfel_list.py | 4 +- scripts/retrieve_detector_data.sh | 5 +- 4 files changed, 174 insertions(+), 4 deletions(-) create mode 100755 scripts/convert_detector_data.sh diff --git a/scripts/convert_detector_data.sh b/scripts/convert_detector_data.sh new file mode 100755 index 0000000..dd4b38d --- /dev/null +++ b/scripts/convert_detector_data.sh @@ -0,0 +1,167 @@ +#!/bin/bash + +if [ $# -lt 3 ] +then + echo "Usage : $0 detector_name start_pulse_id end_pulse_id " + echo "Example : $0 JF07T32V01 11709404000 11709405000 " + echo "Optional parameters: output_file_name rate_multiplicator jf_conversion run_file raw_file" + exit +fi + +DETECTOR=$1 +START_PULSE_ID=$2 +STOP_PULSE_ID=$3 +PULSE_ID_STEP=1 # by default assume 100Hz +JF_CONVERSION=0 # by default don't call ju_export +RUN_FILE=None +RAW_FILE=None + +echo "Request to retrieve : $@ " +echo "Started : "`date` +date1=$(date +%s) + +if [ $# -ge 4 ] +then + OUTFILE=$4 +else + OUTFILE=/gpfs/photonics/swissfel/buffer/test.${START_PULSE_ID}-${STOP_PULSE_ID}.h5 +fi + +if [ $# -ge 5 ] +then + PULSE_ID_STEP=$5 +fi + +if [ $# -ge 6 ] +then + JF_CONVERSION=$6 + if [ $# -ge 7 ] + then + RUN_FILE=$7 + fi + if [ $# -eq 8 ] + then + RAW_FILE=$8 + fi +fi + + +case ${DETECTOR} in +'JF01T03V01') + NM=3 + DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF01.json + ;; 
+'JF02T09V02') + NM=9 + DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF02.json + ;; +'JF06T32V02') + NM=32 + DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF06.json + ;; +'JF06T08V02') + NM=8 + DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF06_4M.json + ;; +'JF07T32V01') + NM=32 + DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF07.json + ;; +'JF13T01V01') + NM=1 + DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF13.json + ;; +*) + NM=1 +esac + +#coreAssociated="7,8,9,10,11,12,13,14" +coreAssociated="9,10,11,12,13,14,15,16,17" + +touch /tmp/detector_retrieve.log + +cd /gpfs/photonics/swissfel/buffer/ + +PREVIOUS_STILL_RUN=0 +while [ ${PREVIOUS_STILL_RUN} == 0 ] +do + sleep 15 # we need to sleep at least to make sure that we don't read from CURRENT file +# ps -fe | grep "bin/sf_writer " | grep -v grep | grep sf_writer > /dev/null +# PREVIOUS_STILL_RUN=$? # not found == 1 +# PREVIOUS_STILL_RUN=1 + n=`ps -fe | grep "bin/sf_writer " | grep -v grep | grep sf_writer | wc -l` + if [ ${n} -le 10 ] + then + PREVIOUS_STILL_RUN=1 + fi +done + +date2=$(date +%s) +echo -n "Waited Time : " +echo $((date2-date1)) | awk '{print int($1/60)":"int($1%60)}' +echo "Started actual retrieve : "`date` + +if [ ${JF_CONVERSION} == 0 ] +then + OUTFILE_RAW=${OUTFILE} +else + if [ ${RAW_FILE} != "None" ] + then + OUTFILE_RAW=${RAW_FILE} + D1=`dirname ${OUTFILE_RAW}` + mkdir -p ${D1} + else + RUN_NUMBER=`basename ${RUN_FILE} | awk -F '.' 
'{print $1}'` + D1=`dirname ${RUN_FILE}` + D2=`dirname ${D1}` + OUTFILE_RAW=${D2}/.raw/${RUN_NUMBER}.${DETECTOR}.h5 + mkdir -p ${D2}/.raw/ + fi +fi + +#taskset -c ${coreAssociated} /usr/bin/sf_writer ${OUTFILE_RAW} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${NM} ${START_PULSE_ID} ${STOP_PULSE_ID} ${PULSE_ID_STEP}>> /tmp/detector_retrieve.log & + +#wait + +coreAssociatedConversion="35,34,33,32,31,30,29,28,27,9,10,11,12,13,14,15,16,17" +#coreAssociatedConversion="35,34,33,32,31,30,29,28,27" +#coreAssociatedConversion="35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18" +#TODO: calculate this number from coreAssociatedConversion +#export NUMBA_NUM_THREADS=18 + +date3=$(date +%s) +echo "Finished : "`date` +echo -n "Retrieve Time : " +echo $((date3-date2)) | awk '{print int($1/60)":"int($1%60)}' + +if [ ${JF_CONVERSION} == 0 ] +then + echo "File is written in raw format, no compression" +else + echo "Will call compression/convertion ${OUTFILE_RAW} --> ${OUTFILE}" + + PREVIOUS_STILL_RUN=0 + while [ ${PREVIOUS_STILL_RUN} == 0 ] + do + sleep $(( $RANDOM % 30 + 1 )) # we need to sleep at least to make sure that we don't read from CURRENT file + n=`ps -fe | grep "scripts/export_file.py " | grep -v grep | grep export | wc -l` + if [ ${n} -le 100 ] + then + PREVIOUS_STILL_RUN=1 + fi + done + date4=$(date +%s) + echo -n "Sleep Time : " + echo $((date4-date3)) | awk '{print int($1/60)":"int($1%60)}' + + export PATH=/home/dbe/miniconda3/bin:$PATH + source deactivate >/dev/null 2>&1 + source activate conversion + taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py ${OUTFILE_RAW} ${OUTFILE} ${RUN_FILE} ${DET_CONFIG_FILE} +# python /home/dbe/git/sf_daq_buffer/scripts/make_crystfel_list.py ${OUTFILE} ${RUN_FILE} + date5=$(date +%s) + echo "Finished : "`date` + echo -n "Conversion Time : " + echo $((date5-date4)) | awk '{print int($1/60)":"int($1%60)}' + +fi diff --git a/scripts/export_file.py b/scripts/export_file.py index 
b0deccd..9c185d2 100644 --- a/scripts/export_file.py +++ b/scripts/export_file.py @@ -54,7 +54,7 @@ with ju.File( mask=mask, gap_pixels=gap_pixels, geometry=geometry, - parallel=False, + parallel=True, ) as juf: n_input_frames = len(juf["data"]) good_frames = np.nonzero(juf["is_good_frame"])[0] diff --git a/scripts/make_crystfel_list.py b/scripts/make_crystfel_list.py index 77ae477..8eb16dc 100644 --- a/scripts/make_crystfel_list.py +++ b/scripts/make_crystfel_list.py @@ -38,10 +38,12 @@ def is_it_dark(laser_mode, detector_rate, pulseid): parser = argparse.ArgumentParser() parser.add_argument("data_file", type=str) parser.add_argument("run_info", type=str) +parser.add_argument("detector", type=str) args = parser.parse_args() data_file = args.data_file run_info_file = args.run_info +detector = args.detector try: with open(run_info_file) as json_file: @@ -62,8 +64,6 @@ except: print(f"Can't open {data_file}") exit() -detector = 'JF06T32V02' - pulseids = f[f'/data/{detector}/pulse_id'][:] n_pulse_id = len(pulseids) if f'/data/{detector}/is_good_frame' in f.keys(): diff --git a/scripts/retrieve_detector_data.sh b/scripts/retrieve_detector_data.sh index ec13813..680112d 100755 --- a/scripts/retrieve_detector_data.sh +++ b/scripts/retrieve_detector_data.sh @@ -157,7 +157,10 @@ else source deactivate >/dev/null 2>&1 source activate conversion taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py ${OUTFILE_RAW} ${OUTFILE} ${RUN_FILE} ${DET_CONFIG_FILE} - python /home/dbe/git/sf_daq_buffer/scripts/make_crystfel_list.py ${OUTFILE} ${RUN_FILE} + if [ ${DETECTOR} == "JF06T32V02" ] + then + python /home/dbe/git/sf_daq_buffer/scripts/make_crystfel_list.py ${OUTFILE} ${RUN_FILE} ${DETECTOR} + fi date5=$(date +%s) echo "Finished : "`date` echo -n "Conversion Time : " From 70c6e754f3a717e80b64994bc81c1d091de57b4e Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Tue, 29 Sep 2020 09:56:01 +0200 Subject: [PATCH 04/61] Cleanup of version 
files --- CMakeLists.txt | 2 - core-broker/core-broker/__init__.py | 0 core-broker/core-broker/broker_client.py | 71 ------- core-broker/core-broker/broker_debugger.py | 70 ------- epics-writer/Readme.md | 34 --- epics-writer/conda-recipe/build.sh | 3 - epics-writer/conda-recipe/meta.yaml | 20 -- epics-writer/epics_writer/__init__.py | 0 epics-writer/epics_writer/start.py | 99 --------- epics-writer/epics_writer/writer.py | 120 ----------- epics-writer/setup.py | 15 -- epics-writer/tests/__init__.py | 0 epics-writer/tests/channels.txt | 6 - epics-writer/tests/test_download_data.py | 34 --- jf-live-daq/CMakeLists.txt | 26 --- jf-live-daq/src/main.cpp | 49 ----- jf-live-daq/test/CMakeLists.txt | 10 - jf-live-daq/test/main.cpp | 9 - jf-live-writer/CMakeLists.txt | 22 -- jf-live-writer/include/BinaryReader.hpp | 28 --- jf-live-writer/include/JFH5LiveWriter.hpp | 49 ----- jf-live-writer/include/LiveImageAssembler.hpp | 51 ----- jf-live-writer/include/live_writer_config.hpp | 9 - jf-live-writer/src/BinaryReader.cpp | 102 --------- jf-live-writer/src/JFH5LiveWriter.cpp | 133 ------------ jf-live-writer/src/LiveImageAssembler.cpp | 159 -------------- jf-live-writer/src/main.cpp | 195 ------------------ jf-live-writer/test/CMakeLists.txt | 10 - jf-live-writer/test/main.cpp | 10 - jf-live-writer/test/test_BinaryReader.cpp | 10 - sf-stream/src/ZmqLiveSender.cpp | 25 --- 31 files changed, 1371 deletions(-) delete mode 100644 core-broker/core-broker/__init__.py delete mode 100644 core-broker/core-broker/broker_client.py delete mode 100644 core-broker/core-broker/broker_debugger.py delete mode 100644 epics-writer/Readme.md delete mode 100644 epics-writer/conda-recipe/build.sh delete mode 100644 epics-writer/conda-recipe/meta.yaml delete mode 100644 epics-writer/epics_writer/__init__.py delete mode 100644 epics-writer/epics_writer/start.py delete mode 100644 epics-writer/epics_writer/writer.py delete mode 100644 epics-writer/setup.py delete mode 100644 
epics-writer/tests/__init__.py delete mode 100644 epics-writer/tests/channels.txt delete mode 100644 epics-writer/tests/test_download_data.py delete mode 100644 jf-live-daq/CMakeLists.txt delete mode 100644 jf-live-daq/src/main.cpp delete mode 100644 jf-live-daq/test/CMakeLists.txt delete mode 100644 jf-live-daq/test/main.cpp delete mode 100644 jf-live-writer/CMakeLists.txt delete mode 100644 jf-live-writer/include/BinaryReader.hpp delete mode 100644 jf-live-writer/include/JFH5LiveWriter.hpp delete mode 100644 jf-live-writer/include/LiveImageAssembler.hpp delete mode 100644 jf-live-writer/include/live_writer_config.hpp delete mode 100644 jf-live-writer/src/BinaryReader.cpp delete mode 100644 jf-live-writer/src/JFH5LiveWriter.cpp delete mode 100644 jf-live-writer/src/LiveImageAssembler.cpp delete mode 100644 jf-live-writer/src/main.cpp delete mode 100644 jf-live-writer/test/CMakeLists.txt delete mode 100644 jf-live-writer/test/main.cpp delete mode 100644 jf-live-writer/test/test_BinaryReader.cpp diff --git a/CMakeLists.txt b/CMakeLists.txt index 9bb6af6..6dbaafa 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -32,5 +32,3 @@ add_subdirectory("core-buffer") add_subdirectory("sf-buffer") add_subdirectory("sf-stream") add_subdirectory("sf-writer") -#add_subdirectory("jf-live-writer") -#add_subdirectory("jf-live-daq") diff --git a/core-broker/core-broker/__init__.py b/core-broker/core-broker/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/core-broker/core-broker/broker_client.py b/core-broker/core-broker/broker_client.py deleted file mode 100644 index 2b676fe..0000000 --- a/core-broker/core-broker/broker_client.py +++ /dev/null @@ -1,71 +0,0 @@ -import json - -from pika import BlockingConnection, ConnectionParameters, BasicProperties - - -class BrokerClient(object): - - REQUEST_EXCHANGE = "request" - STATUS_EXCHANGE = "status" - DEFAULT_BROKER_URL = "127.0.0.1" - - def __init__(self, broker_url=DEFAULT_BROKER_URL): - self.connection = 
BlockingConnection(ConnectionParameters(broker_url)) - self.channel = self.connection.channel() - - self.channel.exchange_declare(exchange=self.REQUEST_EXCHANGE, - exchange_type="topic") - - self.channel.exchange_declare(exchange=self.STATUS_EXCHANGE, - exchange_type="fanout") - - def close(self): - self.connection.close() - - def request_write(self, - output_prefix, - metadata=None, - detectors=None, - bsread_channels=None, - epics_pvs=None): - - routing_key = "." - - if detectors: - for detector in detectors: - routing_key += detector + "." - - if bsread_channels: - routing_key += "bsread" + "." - - if epics_pvs: - routing_key += "epics" + "." - - body_bytes = json.dumps({ - "output_prefix": output_prefix, - "metadata": metadata, - "detectors": detectors, - "bsread_channels": bsread_channels, - "epics_pvs": epics_pvs - }).encode() - - self.channel.basic_publish(exchange=self.REQUEST_EXCHANGE, - routing_key=routing_key, - body=body_bytes) - - status_header = { - "action": "write_request", - "source": "BrokerClient", - "routing_key": routing_key - } - - self.channel.basic_publish(exchange=self.STATUS_EXCHANGE, - properties=BasicProperties( - headers=status_header), - routing_key="", - body=body_bytes) - - -broker = BrokerClient() -broker.request_write("/tmp/test", epics_pvs=["test"]) -broker.close() diff --git a/core-broker/core-broker/broker_debugger.py b/core-broker/core-broker/broker_debugger.py deleted file mode 100644 index 52b504b..0000000 --- a/core-broker/core-broker/broker_debugger.py +++ /dev/null @@ -1,70 +0,0 @@ -from datetime import datetime -import json - -from pika import BlockingConnection, ConnectionParameters - -DEFAULT_BROKER_URL = "127.0.0.1" -STATUS_EXCHANGE = "status" - -COLOR_END_MARKER = '\x1b[0m' - - -def get_color_for_action(action): - - color_mapping = { - "write_request": "\x1b[34;1m", - "write_start": "\x1b[1;33;1m", - "write_finished": "\x1b[1;32;1m" - } - - return color_mapping.get(action, "") - - -def on_status(channel, method_frame, 
header_frame, body): - header = header_frame.headers - request = json.loads(body.decode()) - - action = header["action"] - source = header["source"] - - action_output = get_color_for_action(action) + action + COLOR_END_MARKER - time_output = datetime.utcnow().strftime("%Y%m%d-%H:%M:%S.%f") - - print("[%s] %s %s" % (time_output, action_output, source)) - print(request) - - -def connect_to_broker(broker_url): - connection = BlockingConnection(ConnectionParameters(broker_url)) - channel = connection.channel() - - channel.exchange_declare(exchange=STATUS_EXCHANGE, - exchange_type="fanout") - queue = channel.queue_declare(queue="", exclusive=True).method.queue - channel.queue_bind(queue=queue, - exchange=STATUS_EXCHANGE) - - channel.basic_consume(queue, on_status) - - try: - channel.start_consuming() - except KeyboardInterrupt: - channel.stop_consuming() - - -def main(): - import argparse - parser = argparse.ArgumentParser( - description="Connect and listen to broker events.") - - parser.add_argument('--broker_url', dest='broker_url', - default=DEFAULT_BROKER_URL, - help='RabbitMQ broker URL') - - args = parser.parse_args() - - connect_to_broker(broker_url=args.broker_url) - - -if __name__ == '__main__': - main() diff --git a/epics-writer/Readme.md b/epics-writer/Readme.md deleted file mode 100644 index 5f6ab51..0000000 --- a/epics-writer/Readme.md +++ /dev/null @@ -1,34 +0,0 @@ -# Overview - -Simple server to dump Epics Channel Access data to an HDF5 file. -The server gets an http callback from the Broker whenever there was an acquisition. 
- - -__Note: THIS IS/WAS A FRIDAY AFTERNOON HACK TO MAKE THE SWISSFEL DAQ WORK__ - - -The format of the request is as follows: -``` -{ - 'range': { - 'startPulseId': 100, - 'endPulseId': 120 - }, - - 'parameters': { - 'general/created': 'test', - 'general/user': 'tester', - 'general/process': 'test_process', - 'general/instrument': 'mac', - 'output_file': '/bla/test.h5'} -} - -``` - -Right now this server needs to run on the same server than the - -# Testing - -```bash -curl -XPUT -d '{"range":{"startPulseId": 7281433214, "endPulseId": 7281489688}, "parameters":{"output_file":"test.h5"}}' http://localhost:10200/notify -``` diff --git a/epics-writer/conda-recipe/build.sh b/epics-writer/conda-recipe/build.sh deleted file mode 100644 index d7a34f9..0000000 --- a/epics-writer/conda-recipe/build.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash - -$PYTHON setup.py install # Python command to install the script diff --git a/epics-writer/conda-recipe/meta.yaml b/epics-writer/conda-recipe/meta.yaml deleted file mode 100644 index 8b464c1..0000000 --- a/epics-writer/conda-recipe/meta.yaml +++ /dev/null @@ -1,20 +0,0 @@ -package: - name: epics-writer - version: 0.0.1 - -source: - path: .. 
- -build: - noarch: python - entry_points: - - epics-writer = epics_writer.start:main - -requirements: - build: - - python - run: - - python - - data_api >=0.7.6 - - requests - - pika diff --git a/epics-writer/epics_writer/__init__.py b/epics-writer/epics_writer/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/epics-writer/epics_writer/start.py b/epics-writer/epics_writer/start.py deleted file mode 100644 index 3647d6d..0000000 --- a/epics-writer/epics_writer/start.py +++ /dev/null @@ -1,99 +0,0 @@ -import json -from pika import BlockingConnection, ConnectionParameters, BasicProperties - -from epics_writer.writer import write_epics_pvs - - -DEFAULT_BROKER_URL = "127.0.0.1" -REQUEST_EXCHANGE = "request" -STATUS_EXCHANGE = "status" -QUEUE_NAME = "epics" -OUTPUT_FILE_SUFFIX = ".PVCHANNELS.h5" - - -def update_status(channel, body, action, file, message=None): - status_header = { - "action": action, - "source": "epics_writer", - "routing_key": QUEUE_NAME, - "file": file, - "message": message - } - - channel.basic_publish(exchange=STATUS_EXCHANGE, - properties=BasicProperties( - headers=status_header), - routing_key="", - body=body) - - -def on_message(channel, method_frame, header_frame, body): - - output_file = None - - try: - request = json.loads(body.decode()) - output_prefix = request["output_prefix"] - start_pulse_id = 1 - stop_pulse_id = 10 - metadata = request["metadata"] - epics_pvs = request["epics_pvs"] - - output_file = output_prefix + OUTPUT_FILE_SUFFIX - - update_status(channel, body, "write_start", output_file) - - write_epics_pvs(output_file=output_file, - start_pulse_id=start_pulse_id, - stop_pulse_id=stop_pulse_id, - metadata=metadata, - epics_pvs=epics_pvs) - - except Exception as e: - channel.basic_reject(delivery_tag=method_frame.delivery_tag, - requeue=False) - - update_status(channel, body, "write_rejected", output_file, str(e)) - - else: - channel.basic_ack(delivery_tag=method_frame.delivery_tag) - - update_status(channel, 
body, "write_finished", output_file) - - -def connect_to_broker(broker_url): - connection = BlockingConnection(ConnectionParameters(broker_url)) - channel = connection.channel() - - channel.exchange_declare(exchange=STATUS_EXCHANGE, - exchange_type="fanout") - channel.exchange_declare(exchange=REQUEST_EXCHANGE, - exchange_type="topic") - - channel.queue_declare(queue=QUEUE_NAME, auto_delete=True) - channel.queue_bind(queue=QUEUE_NAME, - exchange=REQUEST_EXCHANGE, - routing_key="*.%s.*" % QUEUE_NAME) - channel.basic_qos(prefetch_count=1) - channel.basic_consume(QUEUE_NAME, on_message) - - try: - channel.start_consuming() - except KeyboardInterrupt: - channel.stop_consuming() - - -def main(): - import argparse - parser = argparse.ArgumentParser(description='Epics HDF5 writer') - parser.add_argument('--broker_url', dest='broker_url', - default=DEFAULT_BROKER_URL, - help='RabbitMQ broker URL') - - args = parser.parse_args() - - connect_to_broker(broker_url=args.broker_url) - - -if __name__ == '__main__': - main() diff --git a/epics-writer/epics_writer/writer.py b/epics-writer/epics_writer/writer.py deleted file mode 100644 index fe9bc06..0000000 --- a/epics-writer/epics_writer/writer.py +++ /dev/null @@ -1,120 +0,0 @@ -import datetime -import time -import logging -import requests -import data_api - -logger = logging.getLogger(__name__) - -DATA_API_QUERY_URL = "https://data-api.psi.ch/sf/query" - - -def write_epics_pvs(output_file, start_pulse_id, stop_pulse_id, metadata, epics_pvs): - - start_date = get_pulse_id_date_mapping(start_pulse_id) - stop_date = get_pulse_id_date_mapping(stop_pulse_id) - - data = get_data(epics_pvs, start=start_date, stop=stop_date) - # TODO: Merge metadata to data. 
- - if data: - logger.info("Persist data to hdf5 file") - data_api.to_hdf5(data, output_file, overwrite=True, compression=None, shuffle=False) - else: - logger.error("No data retrieved") - open(output_file + "_NO_DATA", 'a').close() - - -def get_data(channel_list, start=None, stop=None, base_url=None): - logger.info("Requesting range %s to %s for channels: " % (start, stop, channel_list)) - - query = {"range": {"startDate": datetime.datetime.isoformat(start), - "endDate": datetime.datetime.isoformat(stop), - "startExpansion": True}, - "channels": channel_list, - "fields": ["pulseId", "globalSeconds", "globalDate", "value", - "eventCount"]} - logger.debug(query) - - response = requests.post(DATA_API_QUERY_URL, json=query) - - # Check for successful return of data - if response.status_code != 200: - logger.info("Data retrievali failed, sleep for another time and try") - - itry = 0 - while itry < 5: - itry += 1 - time.sleep(60) - response = requests.post(DATA_API_QUERY_URL, json=query) - if response.status_code == 200: - break - - logger.info("Data retrieval failed, post attempt %d" % itry) - - if response.status_code != 200: - raise RuntimeError("Unable to retrieve data from server: ", response) - - logger.info("Data retieval is successful") - - data = response.json() - - return data_api.client._build_pandas_data_frame(data, index_field="globalDate") - - -def get_pulse_id_date_mapping(pulse_id): - # See https://jira.psi.ch/browse/ATEST-897 for more details ... 
- logger.info("Retrieve pulse-id/date mapping for pulse_id %s" % pulse_id) - - try: - - query = {"range": {"startPulseId": 0, - "endPulseId": pulse_id}, - "limit": 1, - "ordering": "desc", - "channels": ["SIN-CVME-TIFGUN-EVR0:BUNCH-1-OK"], - "fields": ["pulseId", "globalDate"]} - - for c in range(10): - - response = requests.post("https://data-api.psi.ch/sf/query", json=query) - - # Check for successful return of data - if response.status_code != 200: - raise RuntimeError("Unable to retrieve data from server: ", response) - - data = response.json() - - if len(data[0]["data"]) == 0 or not "pulseId" in data[0]["data"][0]: - raise RuntimeError( - "Didn't get good responce from data_api : %s " % data) - - if not pulse_id == data[0]["data"][0]["pulseId"]: - logger.info("retrieval failed") - if c == 0: - ref_date = data[0]["data"][0]["globalDate"] - ref_date = dateutil.parser.parse(ref_date) - - now_date = datetime.datetime.now() - now_date = pytz.timezone('Europe/Zurich').localize( - now_date) - - check_date = ref_date + datetime.timedelta( - seconds=24) # 20 seconds should be enough - delta_date = check_date - now_date - - s = delta_date.seconds - logger.info("retry in " + str(s) + " seconds ") - if not s <= 0: - time.sleep(s) - continue - - raise RuntimeError('Unable to retrieve mapping') - - date = data[0]["data"][0]["globalDate"] - date = dateutil.parser.parse(date) - dates.append(date) - break - - except Exception as e: - raise RuntimeError('Unable to retrieve pulse_id date mapping') from e diff --git a/epics-writer/setup.py b/epics-writer/setup.py deleted file mode 100644 index d4358df..0000000 --- a/epics-writer/setup.py +++ /dev/null @@ -1,15 +0,0 @@ -from setuptools import setup - -setup( - name="cadump", - version="0.0.12", - author="Paul Scherrer Institute", - author_email="daq@psi.ch", - description="Interface to dump data from archiver/databuffer", - packages=["cadump"], - entry_points={ - 'console_scripts': [ - 'cadump_server = cadump.cadump:main', - ], - 
} -) diff --git a/epics-writer/tests/__init__.py b/epics-writer/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/epics-writer/tests/channels.txt b/epics-writer/tests/channels.txt deleted file mode 100644 index 63868d2..0000000 --- a/epics-writer/tests/channels.txt +++ /dev/null @@ -1,6 +0,0 @@ -S10-CPCL-VM1MGC:LOAD - -ONE - TWO -# Comment that need to be removed -THREE # one more comment at the end that need to be removed diff --git a/epics-writer/tests/test_download_data.py b/epics-writer/tests/test_download_data.py deleted file mode 100644 index 87a7194..0000000 --- a/epics-writer/tests/test_download_data.py +++ /dev/null @@ -1,34 +0,0 @@ -import unittest -from unittest import TestCase -from cadump import cadump -import logging - -class TestDownloadData(TestCase): - - def test_download_data(self): - config = { - 'range': { - 'startPulseId': 9618913001, - 'endPulseId': 9618923000 - }, - - 'parameters': { - 'general/created': 'test', - 'general/user': 'tester', - 'general/process': 'test_process', - 'general/instrument': 'mac', - 'output_file': 'test.h5'} # this is usually the full path - } - - cadump.base_url = "https://data-api.psi.ch/sf" - cadump.download_data(config) - # self.fail() - - def test_read_channels(self): - channels = cadump.read_channels("channels.txt") - logging.info(channels) - self.assertEqual(len(channels), 4) - - -if __name__ == '__main__': - unittest.main() diff --git a/jf-live-daq/CMakeLists.txt b/jf-live-daq/CMakeLists.txt deleted file mode 100644 index f2763dc..0000000 --- a/jf-live-daq/CMakeLists.txt +++ /dev/null @@ -1,26 +0,0 @@ - -find_package(MPI REQUIRED) -# Because of openmpi. 
-add_definitions(-DOMPI_SKIP_MPICXX) - -file(GLOB SOURCES - src/*.cpp) - -add_library(jf-live-daq-lib STATIC ${SOURCES}) -target_include_directories(jf-live-daq-lib - PUBLIC include/ - SYSTEM ${MPI_INCLUDE_PATH}) - -target_link_libraries(jf-live-daq-lib - external - core-buffer-lib - ${MPI_LIBRARIES}) - -add_executable(jf-live-daq src/main.cpp) -set_target_properties(jf-live-daq PROPERTIES OUTPUT_NAME jf_live_daq) -target_link_libraries(jf-live-daq - jf-live-daq-lib - ) - -enable_testing() -add_subdirectory(test/) \ No newline at end of file diff --git a/jf-live-daq/src/main.cpp b/jf-live-daq/src/main.cpp deleted file mode 100644 index 6069a0b..0000000 --- a/jf-live-daq/src/main.cpp +++ /dev/null @@ -1,49 +0,0 @@ -#include -#include - -void receive() -{ - -} - -void assemble() -{ - -} - -void write() -{ - -} - -int main(int argc, char** argv) -{ - // Initialize the MPI environment - MPI_Init(NULL, NULL); - - // Get the number of processes - int world_size; - MPI_Comm_size(MPI_COMM_WORLD, &world_size); - - // Get the rank of the process - int world_rank; - MPI_Comm_rank(MPI_COMM_WORLD, &world_rank); - - // Get the name of the processor - char processor_name[MPI_MAX_PROCESSOR_NAME]; - int name_len; - MPI_Get_processor_name(processor_name, &name_len); - - const int n_modules = 16; - - if (world_rank == 0) { - assemble(); - } else if (world_rank <= n_modules) { - receive(); - } else { - write(); - } - - // Finalize the MPI environment. 
- MPI_Finalize(); -} diff --git a/jf-live-daq/test/CMakeLists.txt b/jf-live-daq/test/CMakeLists.txt deleted file mode 100644 index 7dc93bb..0000000 --- a/jf-live-daq/test/CMakeLists.txt +++ /dev/null @@ -1,10 +0,0 @@ -add_executable(jf-live-daq-tests main.cpp) - -target_link_libraries(jf-live-daq-tests - jf-live-daq-lib - hdf5 - hdf5_hl - hdf5_cpp - zmq - gtest - ) diff --git a/jf-live-daq/test/main.cpp b/jf-live-daq/test/main.cpp deleted file mode 100644 index 1ea4d8a..0000000 --- a/jf-live-daq/test/main.cpp +++ /dev/null @@ -1,9 +0,0 @@ -#include "gtest/gtest.h" - - -using namespace std; - -int main(int argc, char **argv) { - ::testing::InitGoogleTest(&argc, argv); - return RUN_ALL_TESTS(); -} diff --git a/jf-live-writer/CMakeLists.txt b/jf-live-writer/CMakeLists.txt deleted file mode 100644 index 6028bfe..0000000 --- a/jf-live-writer/CMakeLists.txt +++ /dev/null @@ -1,22 +0,0 @@ -file(GLOB SOURCES - src/*.cpp) - -add_library(jf-live-writer-lib STATIC ${SOURCES}) -target_include_directories(jf-live-writer-lib PUBLIC include/) -target_link_libraries(jf-live-writer-lib - external - core-buffer-lib) - -add_executable(jf-live-writer src/main.cpp) -set_target_properties(jf-live-writer PROPERTIES OUTPUT_NAME jf_live_writer) -target_link_libraries(jf-live-writer - jf-live-writer-lib - sf-writer-lib - hdf5 - hdf5_hl - hdf5_cpp - pthread - ) - -enable_testing() -add_subdirectory(test/) \ No newline at end of file diff --git a/jf-live-writer/include/BinaryReader.hpp b/jf-live-writer/include/BinaryReader.hpp deleted file mode 100644 index 85d2a0c..0000000 --- a/jf-live-writer/include/BinaryReader.hpp +++ /dev/null @@ -1,28 +0,0 @@ -#ifndef SF_DAQ_BUFFER_BINARYREADER_HPP -#define SF_DAQ_BUFFER_BINARYREADER_HPP - - -#include - -class BinaryReader { - - const std::string detector_folder_; - const std::string module_name_; - - std::string current_input_file_; - int input_file_fd_; - - void open_file(const std::string& filename); - void close_current_file(); - -public: - 
BinaryReader(const std::string &detector_folder, - const std::string &module_name); - - ~BinaryReader(); - - void get_frame(const uint64_t pulse_id, BufferBinaryFormat *buffer); -}; - - -#endif //SF_DAQ_BUFFER_BINARYREADER_HPP diff --git a/jf-live-writer/include/JFH5LiveWriter.hpp b/jf-live-writer/include/JFH5LiveWriter.hpp deleted file mode 100644 index a417631..0000000 --- a/jf-live-writer/include/JFH5LiveWriter.hpp +++ /dev/null @@ -1,49 +0,0 @@ -#ifndef SFWRITER_HPP -#define SFWRITER_HPP - -#include -#include -#include - -#include "LiveImageAssembler.hpp" - -const auto& H5_UINT64 = H5::PredType::NATIVE_UINT64; -const auto& H5_UINT32 = H5::PredType::NATIVE_UINT32; -const auto& H5_UINT16 = H5::PredType::NATIVE_UINT16; -const auto& H5_UINT8 = H5::PredType::NATIVE_UINT8; - -class JFH5LiveWriter { - - const std::string detector_name_; - const size_t n_modules_; - const size_t n_pulses_; - - size_t write_index_; - - H5::H5File file_; - H5::DataSet image_dataset_; - - uint64_t* b_pulse_id_; - uint64_t* b_frame_index_; - uint32_t* b_daq_rec_; - uint8_t* b_is_good_frame_ ; - - void init_file(const std::string &output_file); - void write_dataset(const std::string name, - const void *buffer, - const H5::PredType &type); - void write_metadata(); - std::string get_detector_name(const std::string& detector_folder); - - void close_file(); - -public: - JFH5LiveWriter(const std::string& output_file, - const std::string& detector_folder, - const size_t n_modules, - const size_t n_pulses); - ~JFH5LiveWriter(); - void write(const ImageMetadata* metadata, const char* data); -}; - -#endif //SFWRITER_HPP diff --git a/jf-live-writer/include/LiveImageAssembler.hpp b/jf-live-writer/include/LiveImageAssembler.hpp deleted file mode 100644 index 5bcb749..0000000 --- a/jf-live-writer/include/LiveImageAssembler.hpp +++ /dev/null @@ -1,51 +0,0 @@ -#ifndef SF_DAQ_BUFFER_LIVEIMAGEASSEMBLER_HPP -#define SF_DAQ_BUFFER_LIVEIMAGEASSEMBLER_HPP - -#include - -#include "buffer_config.hpp" -#include 
"formats.hpp" - -const uint64_t IA_EMPTY_SLOT_VALUE = 0; - -struct ImageMetadata -{ - uint64_t pulse_id; - uint64_t frame_index; - uint32_t daq_rec; - uint8_t is_good_image; -}; - -class LiveImageAssembler { - const size_t n_modules_; - const size_t image_buffer_slot_n_bytes_; - - char* image_buffer_; - ImageMetadata* image_meta_buffer_; - ModuleFrame* frame_meta_buffer_; - std::atomic_int* buffer_status_; - std::atomic_uint64_t* buffer_pulse_id_; - - size_t get_data_offset(const uint64_t slot_id, const int i_module); - size_t get_frame_metadata_offset(const uint64_t slot_id, const int i_module); - -public: - LiveImageAssembler(const size_t n_modules); - - virtual ~LiveImageAssembler(); - - bool is_slot_free(const uint64_t pulse_id); - bool is_slot_full(const uint64_t pulse_id); - - void process(const uint64_t pulse_id, - const int i_module, - const BufferBinaryFormat* block_buffer); - - void free_slot(const uint64_t pulse_id); - - ImageMetadata* get_metadata_buffer(const uint64_t pulse_id); - char* get_data_buffer(const uint64_t pulse_id); -}; - - -#endif //SF_DAQ_BUFFER_LIVEIMAGEASSEMBLER_HPP diff --git a/jf-live-writer/include/live_writer_config.hpp b/jf-live-writer/include/live_writer_config.hpp deleted file mode 100644 index 0a62457..0000000 --- a/jf-live-writer/include/live_writer_config.hpp +++ /dev/null @@ -1,9 +0,0 @@ -#include - -namespace live_writer_config -{ - // MS to retry reading from the image assembler. - const size_t ASSEMBLER_RETRY_MS = 5; - // Number of slots in the reconstruction buffer. 
- const size_t WRITER_IA_N_SLOTS = 200; -} \ No newline at end of file diff --git a/jf-live-writer/src/BinaryReader.cpp b/jf-live-writer/src/BinaryReader.cpp deleted file mode 100644 index 0512ac7..0000000 --- a/jf-live-writer/src/BinaryReader.cpp +++ /dev/null @@ -1,102 +0,0 @@ -#include "BinaryReader.hpp" - -#include -#include -#include -#include - -#include "BufferUtils.hpp" -#include "buffer_config.hpp" - -using namespace std; -using namespace buffer_config; - -BinaryReader::BinaryReader( - const std::string &detector_folder, - const std::string &module_name) : - detector_folder_(detector_folder), - module_name_(module_name), - current_input_file_(""), - input_file_fd_(-1) -{} - -BinaryReader::~BinaryReader() -{ - close_current_file(); -} - -void BinaryReader::get_frame( - const uint64_t pulse_id, BufferBinaryFormat* buffer) -{ - - auto current_frame_file = BufferUtils::get_filename( - detector_folder_, module_name_, pulse_id); - - if (current_frame_file != current_input_file_) { - open_file(current_frame_file); - } - - size_t file_index = BufferUtils::get_file_frame_index(pulse_id); - size_t n_bytes_offset = file_index * sizeof(BufferBinaryFormat); - - auto lseek_result = lseek(input_file_fd_, n_bytes_offset, SEEK_SET); - if (lseek_result < 0) { - stringstream err_msg; - - err_msg << "[BinaryReader::get_frame]"; - err_msg << " Error while lseek on file "; - err_msg << current_input_file_ << " for n_bytes_offset "; - err_msg << n_bytes_offset << ": " << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } - - auto n_bytes = ::read(input_file_fd_, buffer, sizeof(BufferBinaryFormat)); - - if (n_bytes < sizeof(BufferBinaryFormat)) { - stringstream err_msg; - - err_msg << "[BinaryReader::get_block]"; - err_msg << " Error while reading from file "; - err_msg << current_input_file_ << ": " << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } -} - -void BinaryReader::open_file(const std::string& filename) -{ - close_current_file(); - 
- input_file_fd_ = open(filename.c_str(), O_RDONLY); - - if (input_file_fd_ < 0) { - stringstream err_msg; - - err_msg << "[BinaryReader::open_file]"; - err_msg << " Cannot open file " << filename << ": "; - err_msg << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } - - current_input_file_ = filename; -} - -void BinaryReader::close_current_file() -{ - if (input_file_fd_ != -1) { - if (close(input_file_fd_) < 0) { - stringstream err_msg; - - err_msg << "[BinaryWriter::close_current_file]"; - err_msg << " Error while closing file " << current_input_file_; - err_msg << ": " << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } - - input_file_fd_ = -1; - current_input_file_ = ""; - } -} diff --git a/jf-live-writer/src/JFH5LiveWriter.cpp b/jf-live-writer/src/JFH5LiveWriter.cpp deleted file mode 100644 index 5928a6e..0000000 --- a/jf-live-writer/src/JFH5LiveWriter.cpp +++ /dev/null @@ -1,133 +0,0 @@ -#include "JFH5LiveWriter.hpp" - -#include -#include - - -#include "buffer_config.hpp" - -using namespace std; -using namespace buffer_config; - -JFH5LiveWriter::JFH5LiveWriter(const string& output_file, - const string& detector_folder, - const size_t n_modules, - const size_t n_pulses) : - detector_name_(get_detector_name(detector_folder)), - n_modules_(n_modules), - n_pulses_(n_pulses), - write_index_(0) -{ - b_pulse_id_ = new uint64_t[n_pulses_]; - b_frame_index_= new uint64_t[n_pulses_]; - b_daq_rec_ = new uint32_t[n_pulses_]; - b_is_good_frame_ = new uint8_t[n_pulses_]; - - init_file(output_file); -} - -void JFH5LiveWriter::init_file(const string& output_file) -{ - file_ = H5::H5File(output_file, H5F_ACC_TRUNC); - file_.createGroup("/data"); - file_.createGroup("/data/" + detector_name_); - - H5::DataSpace att_space(H5S_SCALAR); - H5::DataType data_type = H5::StrType(0, H5T_VARIABLE); - - file_.createGroup("/general"); - auto detector_dataset = file_.createDataSet( - "/general/detector_name", data_type ,att_space); - - 
detector_dataset.write(detector_name_, data_type); - - hsize_t image_dataset_dims[3] = - {n_pulses_, n_modules_ * MODULE_Y_SIZE, MODULE_X_SIZE}; - - H5::DataSpace image_dataspace(3, image_dataset_dims); - - hsize_t image_dataset_chunking[3] = - {1, n_modules_ * MODULE_Y_SIZE, MODULE_X_SIZE}; - H5::DSetCreatPropList image_dataset_properties; - image_dataset_properties.setChunk(3, image_dataset_chunking); - - image_dataset_ = file_.createDataSet( - "/data/" + detector_name_ + "/data", - H5_UINT16, - image_dataspace, - image_dataset_properties); -} - - -std::string JFH5LiveWriter::get_detector_name(const string& detector_folder) -{ - size_t last_separator; - if ((last_separator = detector_folder.rfind("/")) == string::npos) { - return detector_folder; - } - - return detector_folder.substr(last_separator + 1); -} - -JFH5LiveWriter::~JFH5LiveWriter() -{ - close_file(); - - delete[] b_pulse_id_; - delete[] b_frame_index_; - delete[] b_daq_rec_; - delete[] b_is_good_frame_; -} - -void JFH5LiveWriter::write_dataset( - const string name, const void* buffer, const H5::PredType& type) -{ - hsize_t b_m_dims[] = {n_pulses_}; - H5::DataSpace b_m_space (1, b_m_dims); - - hsize_t f_m_dims[] = {n_pulses_, 1}; - H5::DataSpace f_m_space(2, f_m_dims); - - auto complete_name = "/data/" + detector_name_ + "/" + name; - auto dataset = file_.createDataSet(complete_name, type, f_m_space); - - dataset.write(buffer, type, b_m_space, f_m_space); - - dataset.close(); -} - -void JFH5LiveWriter::write_metadata() -{ - write_dataset("pulse_id", &b_pulse_id_, H5_UINT64); - write_dataset("frame_index", &b_frame_index_, H5_UINT64); - write_dataset("daq_rec", &b_daq_rec_, H5_UINT32); - write_dataset("is_good_frame", &b_is_good_frame_, H5_UINT8); -} - -void JFH5LiveWriter::close_file() -{ - if (file_.getId() == -1) { - return; - } - - image_dataset_.close(); - - write_metadata(); - - file_.close(); -} - -void JFH5LiveWriter::write(const ImageMetadata* metadata, const char* data) -{ - hsize_t offset[] = 
{write_index_, 0, 0}; - - H5DOwrite_chunk(image_dataset_.getId(), H5P_DEFAULT, 0, - offset, MODULE_N_BYTES * n_modules_, data); - - b_pulse_id_[write_index_] = metadata->pulse_id; - b_frame_index_[write_index_] = metadata->frame_index; - b_daq_rec_[write_index_] = metadata->daq_rec; - b_is_good_frame_[write_index_] = metadata->is_good_image; - - write_index_++; -} diff --git a/jf-live-writer/src/LiveImageAssembler.cpp b/jf-live-writer/src/LiveImageAssembler.cpp deleted file mode 100644 index 57cf48b..0000000 --- a/jf-live-writer/src/LiveImageAssembler.cpp +++ /dev/null @@ -1,159 +0,0 @@ -#include - -#include "LiveImageAssembler.hpp" -#include "buffer_config.hpp" -#include "live_writer_config.hpp" - -using namespace std; -using namespace buffer_config; -using namespace live_writer_config; - -LiveImageAssembler::LiveImageAssembler(const size_t n_modules) : - n_modules_(n_modules), - image_buffer_slot_n_bytes_(MODULE_N_BYTES * n_modules_) -{ - image_buffer_ = new char[WRITER_IA_N_SLOTS * image_buffer_slot_n_bytes_]; - image_meta_buffer_ = new ImageMetadata[WRITER_IA_N_SLOTS]; - frame_meta_buffer_ = new ModuleFrame[WRITER_IA_N_SLOTS * n_modules]; - buffer_status_ = new atomic_int[WRITER_IA_N_SLOTS]; - buffer_pulse_id_ = new atomic_uint64_t[WRITER_IA_N_SLOTS]; - - for (size_t i=0; i < WRITER_IA_N_SLOTS; i++) { - free_slot(i); - } -} - -LiveImageAssembler::~LiveImageAssembler() -{ - delete[] image_buffer_; - delete[] image_meta_buffer_; -} - -bool LiveImageAssembler::is_slot_free(const uint64_t pulse_id) -{ - auto slot_id = pulse_id % WRITER_IA_N_SLOTS; - - uint64_t slot_pulse_id = IA_EMPTY_SLOT_VALUE; - if (buffer_pulse_id_[slot_id].compare_exchange_strong( - slot_pulse_id, pulse_id)) { - return true; - } - - auto is_free = buffer_status_[slot_id].load(memory_order_relaxed) > 0; - return is_free && (slot_pulse_id == pulse_id); -} - -bool LiveImageAssembler::is_slot_full(const uint64_t pulse_id) -{ - auto slot_id = pulse_id % WRITER_IA_N_SLOTS; - return 
buffer_status_[slot_id].load(memory_order_relaxed) == 0; -} - -size_t LiveImageAssembler::get_data_offset( - const uint64_t slot_id, const int i_module) -{ - size_t slot_i_offset = slot_id * image_buffer_slot_n_bytes_; - size_t module_i_offset = i_module * MODULE_N_BYTES; - - return slot_i_offset + module_i_offset; -} - -size_t LiveImageAssembler::get_frame_metadata_offset( - const uint64_t slot_id, const int i_module) -{ - size_t slot_m_offset = slot_id * n_modules_; - size_t module_m_offset = i_module; - - return slot_m_offset + module_m_offset; -} - -void LiveImageAssembler::process( - const uint64_t pulse_id, - const int i_module, - const BufferBinaryFormat* file_buffer) -{ - const auto slot_id = pulse_id % WRITER_IA_N_SLOTS; - - auto frame_meta_offset = get_frame_metadata_offset(slot_id, i_module); - auto image_offset = get_data_offset(slot_id, i_module); - - memcpy( - &(frame_meta_buffer_[frame_meta_offset]), - &(file_buffer->metadata), - sizeof(file_buffer->metadata)); - - memcpy( - image_buffer_ + image_offset, - &(file_buffer->data[0]), - MODULE_N_BYTES); - - buffer_status_[slot_id].fetch_sub(1, memory_order_relaxed); -} - -void LiveImageAssembler::free_slot(const uint64_t pulse_id) -{ - auto slot_id = pulse_id % WRITER_IA_N_SLOTS; - buffer_status_[slot_id].store(n_modules_, memory_order_relaxed); - buffer_pulse_id_[slot_id].store(IA_EMPTY_SLOT_VALUE, memory_order_relaxed); -} - -ImageMetadata* LiveImageAssembler::get_metadata_buffer(const uint64_t pulse_id) -{ - const auto slot_id = pulse_id % WRITER_IA_N_SLOTS; - - ImageMetadata& image_meta = image_meta_buffer_[slot_id]; - - auto frame_meta_offset = get_frame_metadata_offset(slot_id, 0); - - auto is_pulse_init = false; - image_meta.is_good_image = 1; - image_meta.pulse_id = 0; - - for (size_t i_module=0; i_module < n_modules_; i_module++) { - - auto& frame_meta = frame_meta_buffer_[frame_meta_offset]; - frame_meta_offset += 1; - - auto is_good_frame = - frame_meta.n_recv_packets == 
JF_N_PACKETS_PER_FRAME; - - if (!is_good_frame) { - image_meta.pulse_id = 0; - continue; - } - - if (!is_pulse_init) { - image_meta.pulse_id = frame_meta.pulse_id; - image_meta.frame_index = frame_meta.frame_index; - image_meta.daq_rec = frame_meta.daq_rec; - - is_pulse_init = true; - } - - if (image_meta.is_good_image == 1) { - if (frame_meta.pulse_id != image_meta.pulse_id) { - image_meta.is_good_image = 0; - } - - if (frame_meta.frame_index != image_meta.frame_index) { - image_meta.is_good_image = 0; - } - - if (frame_meta.daq_rec != image_meta.daq_rec) { - image_meta.is_good_image = 0; - } - - if (frame_meta.n_recv_packets != JF_N_PACKETS_PER_FRAME) { - image_meta.is_good_image = 0; - } - } - } - - return &image_meta; -} - -char* LiveImageAssembler::get_data_buffer(const uint64_t pulse_id) -{ - auto slot_id = pulse_id % WRITER_IA_N_SLOTS; - return image_buffer_ + (slot_id * image_buffer_slot_n_bytes_); -} diff --git a/jf-live-writer/src/main.cpp b/jf-live-writer/src/main.cpp deleted file mode 100644 index 139a34f..0000000 --- a/jf-live-writer/src/main.cpp +++ /dev/null @@ -1,195 +0,0 @@ -#include -#include -#include -#include -#include - -#include "zmq.h" -#include "live_writer_config.hpp" -#include "buffer_config.hpp" -#include "bitshuffle/bitshuffle.h" -#include "JFH5LiveWriter.hpp" -#include "LiveImageAssembler.hpp" -#include "BinaryReader.hpp" - -using namespace std; -using namespace chrono; -using namespace buffer_config; -using namespace live_writer_config; - -void read_buffer( - const string detector_folder, - const string module_name, - const int i_module, - const vector& pulse_ids_to_write, - LiveImageAssembler& image_assembler, - void* ctx) -{ - BinaryReader reader(detector_folder, module_name); - auto frame_buffer = new BufferBinaryFormat(); - - void* socket = zmq_socket(ctx, ZMQ_SUB); - if (socket == nullptr) { - throw runtime_error(zmq_strerror(errno)); - } - - int rcvhwm = 100; - if (zmq_setsockopt(socket, ZMQ_RCVHWM, &rcvhwm, sizeof(rcvhwm)) != 
0) { - throw runtime_error(zmq_strerror(errno)); - } - - int linger = 0; - if (zmq_setsockopt(socket, ZMQ_LINGER, &linger, sizeof(linger)) != 0) { - throw runtime_error(zmq_strerror(errno)); - } - - // In milliseconds. - int rcvto = 2000; - if (zmq_setsockopt(socket, ZMQ_RCVTIMEO, &rcvto, sizeof(rcvto)) != 0 ){ - throw runtime_error(zmq_strerror(errno)); - } - - if (zmq_connect(socket, "tcp://127.0.0.1:51234") != 0) { - throw runtime_error(zmq_strerror(errno)); - } - - if (zmq_setsockopt(socket, ZMQ_SUBSCRIBE, "", 0) != 0) { - throw runtime_error(zmq_strerror(errno)); - } - - const uint64_t PULSE_ID_DELAY = 100; - - uint64_t live_pulse_id = pulse_ids_to_write.front(); - for (uint64_t pulse_id:pulse_ids_to_write) { - - while(!image_assembler.is_slot_free(pulse_id)) { - this_thread::sleep_for(chrono::milliseconds(ASSEMBLER_RETRY_MS)); - } - - auto start_time = steady_clock::now(); - - // Enforce a delay of 1 second for writing. - while (live_pulse_id - pulse_id < PULSE_ID_DELAY) { - if (zmq_recv(socket, &live_pulse_id, - sizeof(live_pulse_id), 0) == -1) { - if (errno == EAGAIN) { - throw runtime_error("Did not receive pulse_id in time."); - } else { - throw runtime_error(zmq_strerror(errno)); - } - } - } - - reader.get_frame(pulse_id, frame_buffer); - - auto end_time = steady_clock::now(); - uint64_t read_us_duration = duration_cast( - end_time-start_time).count(); - - start_time = steady_clock::now(); - - image_assembler.process(pulse_id, i_module, frame_buffer); - - end_time = steady_clock::now(); - uint64_t compose_us_duration = duration_cast( - end_time-start_time).count(); - - cout << "sf_writer:avg_read_us "; - cout << read_us_duration / BUFFER_BLOCK_SIZE << endl; - cout << "sf_writer:avg_assemble_us "; - cout << compose_us_duration / BUFFER_BLOCK_SIZE << endl; - } - - delete frame_buffer; -} - -int main (int argc, char *argv[]) -{ - if (argc != 7) { - cout << endl; - cout << "Usage: sf_writer [output_file] [detector_folder] [n_modules]"; - cout << " 
[start_pulse_id] [n_pulses] [pulse_id_step]"; - cout << endl; - cout << "\toutput_file: Complete path to the output file." << endl; - cout << "\tdetector_folder: Absolute path to detector buffer." << endl; - cout << "\tn_modules: number of modules" << endl; - cout << "\tstart_pulse_id: Start pulse_id of retrieval." << endl; - cout << "\tn_pulses: Number of pulses to write." << endl; - cout << "\tpulse_id_step: 1==100Hz, 2==50hz, 4==25Hz.." << endl; - cout << endl; - - exit(-1); - } - - string output_file = string(argv[1]); - const string detector_folder = string(argv[2]); - size_t n_modules = atoi(argv[3]); - uint64_t start_pulse_id = (uint64_t) atoll(argv[4]); - size_t n_pulses = (size_t) atoll(argv[5]); - int pulse_id_step = atoi(argv[6]); - - std::vector pulse_ids_to_write; - uint64_t i_pulse_id = start_pulse_id; - for (size_t i=0; i reading_threads(n_modules); - for (size_t i_module=0; i_module( - end_time-start_time).count(); - - image_assembler.free_slot(pulse_id); - - cout << "sf_writer:avg_write_us "; - cout << write_us_duration / BUFFER_BLOCK_SIZE << endl; - } - - for (auto& reading_thread : reading_threads) { - if (reading_thread.joinable()) { - reading_thread.join(); - } - } - - return 0; -} diff --git a/jf-live-writer/test/CMakeLists.txt b/jf-live-writer/test/CMakeLists.txt deleted file mode 100644 index 1079fc2..0000000 --- a/jf-live-writer/test/CMakeLists.txt +++ /dev/null @@ -1,10 +0,0 @@ -add_executable(jf-live-writer-tests main.cpp) - -target_link_libraries(jf-live-writer-tests - jf-live-writer-lib - hdf5 - hdf5_hl - hdf5_cpp - zmq - gtest - ) diff --git a/jf-live-writer/test/main.cpp b/jf-live-writer/test/main.cpp deleted file mode 100644 index 69b7f53..0000000 --- a/jf-live-writer/test/main.cpp +++ /dev/null @@ -1,10 +0,0 @@ -#include "gtest/gtest.h" - -#include "test_BinaryReader.cpp" - -using namespace std; - -int main(int argc, char **argv) { - ::testing::InitGoogleTest(&argc, argv); - return RUN_ALL_TESTS(); -} diff --git 
a/jf-live-writer/test/test_BinaryReader.cpp b/jf-live-writer/test/test_BinaryReader.cpp deleted file mode 100644 index cc30157..0000000 --- a/jf-live-writer/test/test_BinaryReader.cpp +++ /dev/null @@ -1,10 +0,0 @@ -#include -#include "gtest/gtest.h" - -TEST(BinaryReader, basic_interaction) { - // TODO: Write some real tests. - auto detector_folder = "test_device"; - auto module_name = "M1"; - BinaryReader reader(detector_folder, module_name); -} - diff --git a/sf-stream/src/ZmqLiveSender.cpp b/sf-stream/src/ZmqLiveSender.cpp index cf1b1aa..78cf055 100644 --- a/sf-stream/src/ZmqLiveSender.cpp +++ b/sf-stream/src/ZmqLiveSender.cpp @@ -59,27 +59,6 @@ ZmqLiveSender::ZmqLiveSender( throw runtime_error(zmq_strerror(errno)); } } - - if (false) { - socket_pulse_ = zmq_socket(ctx, ZMQ_PUB); - - if (zmq_bind(socket_pulse_, config.pulse_address.c_str()) != 0) { - throw runtime_error(zmq_strerror(errno)); - } - - const int sndhwm = PULSE_ZMQ_SNDHWM; - if (zmq_setsockopt( - socket_pulse_, ZMQ_SNDHWM, &sndhwm, sizeof(sndhwm)) != 0) { - throw runtime_error(zmq_strerror(errno)); - } - - const int linger = 0; - if (zmq_setsockopt( - socket_pulse_, ZMQ_LINGER, &linger, sizeof(linger)) != 0) { - throw runtime_error(zmq_strerror(errno)); - } - } - } ZmqLiveSender::~ZmqLiveSender() @@ -121,10 +100,6 @@ void ZmqLiveSender::send(const ModuleFrameBuffer *meta, const char *data) } } -// if(zmq_send(socket_pulse_, &pulse_id, sizeof(pulse_id), 0) == -1) { -// throw runtime_error(zmq_strerror(errno)); -// } - // TODO: Here we need to send to streamvis and live analysis metadata(probably need to operate still on them) and data(not every frame) header.AddMember("frame", frame_index, header_alloc); From 95ae7d801157ffda45d374b79eebb5990bc8ede4 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Tue, 29 Sep 2020 09:59:44 +0200 Subject: [PATCH 05/61] Set initial version --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 
6dbaafa..491fb13 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -3,7 +3,7 @@ cmake_minimum_required(VERSION 3.12) project(sf_daq_buffer) set(CMAKE_CXX_STANDARD 17) -set (LIB_CPP_H5_WRITER_VERSION "1.0.0") +set (SF_DAQ_BUFFER_VERSION "1.0.0") set (CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}) From 5453f38e9fd0b630597297ff4aaad901daf90e59 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Tue, 29 Sep 2020 17:28:58 +0200 Subject: [PATCH 06/61] switch off parallel for conversion and added script to test conversion --- scripts/export_file.py | 2 +- scripts/test_convertion.sh | 50 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 51 insertions(+), 1 deletion(-) create mode 100755 scripts/test_convertion.sh diff --git a/scripts/export_file.py b/scripts/export_file.py index 9c185d2..b0deccd 100644 --- a/scripts/export_file.py +++ b/scripts/export_file.py @@ -54,7 +54,7 @@ with ju.File( mask=mask, gap_pixels=gap_pixels, geometry=geometry, - parallel=True, + parallel=False, ) as juf: n_input_frames = len(juf["data"]) good_frames = np.nonzero(juf["is_good_frame"])[0] diff --git a/scripts/test_convertion.sh b/scripts/test_convertion.sh new file mode 100755 index 0000000..d8bfe90 --- /dev/null +++ b/scripts/test_convertion.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +export PATH=/home/dbe/miniconda3/bin:$PATH +source deactivate >/dev/null 2>&1 +source activate conversion + +export NUMBA_NUM_THREADS=$1 +OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-PAR-${NUMBA_NUM_THREADS} + +#coreAssociatedBuffer=(35 34 33 32 31 30 29 28 27 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 8 7 6 5 4 3 2 1 0) +#coreAssociatedBuffer=(35 34 33 32 31 30 29 28 27 18 19 20 21 22 23 24 25 26 9 10 11 12 13 14 15 16 17 8 7 6 5 4 3 2 1 0) + +#coreAssociatedBuffer=(35 26 34 25 33 24 32 23 31 22 30 21 29 20 28 19 27 18 17 8 16 7 15 6 14 5 13 4 12 3 11 2 10 1 9) + +coreAssociated="35,26,34,25,33,24,32,23,31,22,30,21,29,20,28,19,27,18" + +for N in 1 3 5 7 9 11 13 15 17 19 21 23 25 27 
29 31 33 35 +do + + for n in `seq -f %02g 1 $N` + do + sleep 0.1 +# c=`echo $n - 1 | bc` +# echo process : $n cores : ${coreAssociatedBuffer[10#${c}]} + echo process : $n cores :${coreAssociated} + rm -rf /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log +# taskset -c ${coreAssociatedBuffer[10#${c}]} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py /sf/alvra/data/p18674/raw//RAW_DATA/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/run_info/003000/run_0030${n}.json /gpfs/photonics/swissfel/buffer/config/stream-JF06.json > /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log & + taskset -c ${coreAssociated} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py /sf/alvra/data/p18674/raw//RAW_DATA/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/run_info/003000/run_0030${n}.json /gpfs/photonics/swissfel/buffer/config/stream-JF06.json > /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log & + + done + echo Submitted + + A=0 + while [ $A -lt 30 ] + do + sleep 30 + A=`grep read /sf/alvra/data/p18674/raw/run_info/003000/conversion_003001.log | wc -l` + echo Number of cycles passed $A + done + + K=`ps -fe | grep export | grep -v grep | awk '{print $2}' | xargs` + echo Killing `ps -fe | grep export | grep -v grep | awk '{print $2}' | wc -l` processes ${K} + kill -9 ${K} + + sleep 2 + + mkdir -p ${OUTDIR}/${N} + mv /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030* ${OUTDIR}/${N}/. 
+ +done From f313c4a07b799fa3f4b54111ce8e3252ac1f2947 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Thu, 1 Oct 2020 08:36:19 +0200 Subject: [PATCH 07/61] update to script to test conversion --- scripts/test_convertion.sh | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/scripts/test_convertion.sh b/scripts/test_convertion.sh index d8bfe90..354a4fe 100755 --- a/scripts/test_convertion.sh +++ b/scripts/test_convertion.sh @@ -4,28 +4,33 @@ export PATH=/home/dbe/miniconda3/bin:$PATH source deactivate >/dev/null 2>&1 source activate conversion -export NUMBA_NUM_THREADS=$1 -OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-PAR-${NUMBA_NUM_THREADS} +#export NUMBA_NUM_THREADS=$1 +#OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-PAR-${NUMBA_NUM_THREADS} +OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-NEW.NO-LOAD.3-PIN #coreAssociatedBuffer=(35 34 33 32 31 30 29 28 27 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 8 7 6 5 4 3 2 1 0) #coreAssociatedBuffer=(35 34 33 32 31 30 29 28 27 18 19 20 21 22 23 24 25 26 9 10 11 12 13 14 15 16 17 8 7 6 5 4 3 2 1 0) -#coreAssociatedBuffer=(35 26 34 25 33 24 32 23 31 22 30 21 29 20 28 19 27 18 17 8 16 7 15 6 14 5 13 4 12 3 11 2 10 1 9) +coreAssociatedBuffer=(35 26 34 25 33 24 32 23 31 22 30 21 29 20 28 19 27 18 17 8 16 7 15 6 14 5 13 4 12 3 11 2 10 1 9) coreAssociated="35,26,34,25,33,24,32,23,31,22,30,21,29,20,28,19,27,18" -for N in 1 3 5 7 9 11 13 15 17 19 21 23 25 27 29 31 33 35 +#for N in 1 3 5 7 9 11 13 15 17 19 21 23 25 27 29 31 33 35 +for N in 1 12 14 16 18 20 2 4 6 8 10 22 24 26 28 30 32 35 do for n in `seq -f %02g 1 $N` do - sleep 0.1 -# c=`echo $n - 1 | bc` -# echo process : $n cores : ${coreAssociatedBuffer[10#${c}]} - echo process : $n cores :${coreAssociated} rm -rf /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log -# taskset -c ${coreAssociatedBuffer[10#${c}]} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py 
/sf/alvra/data/p18674/raw//RAW_DATA/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/run_info/003000/run_0030${n}.json /gpfs/photonics/swissfel/buffer/config/stream-JF06.json > /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log & - taskset -c ${coreAssociated} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py /sf/alvra/data/p18674/raw//RAW_DATA/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/run_info/003000/run_0030${n}.json /gpfs/photonics/swissfel/buffer/config/stream-JF06.json > /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log & + sleep 0.1 + + c=`echo $n - 1 | bc` + echo process : $n cores : ${coreAssociatedBuffer[10#${c}]} + taskset -c ${coreAssociatedBuffer[10#${c}]} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py /sf/alvra/data/p18674/raw//RAW_DATA/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/run_info/003000/run_0030${n}.json /gpfs/photonics/swissfel/buffer/config/stream-JF06.json > /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log & + +# echo process : $n cores :${coreAssociated} +# rm -rf /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log +# taskset -c ${coreAssociated} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py /sf/alvra/data/p18674/raw//RAW_DATA/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/run_info/003000/run_0030${n}.json /gpfs/photonics/swissfel/buffer/config/stream-JF06.json > /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log & done echo Submitted From 554ca8effba6d4b27496e9fb4ccedeac97c5a82d Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Thu, 1 Oct 2020 16:02:25 +0200 Subject: [PATCH 08/61] last version of test script used at 
sf-daq-1 --- scripts/test_convertion.sh | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/scripts/test_convertion.sh b/scripts/test_convertion.sh index 354a4fe..382ed20 100755 --- a/scripts/test_convertion.sh +++ b/scripts/test_convertion.sh @@ -6,7 +6,7 @@ source activate conversion #export NUMBA_NUM_THREADS=$1 #OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-PAR-${NUMBA_NUM_THREADS} -OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-NEW.NO-LOAD.3-PIN +OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-NEW.NO-LOAD.4 #coreAssociatedBuffer=(35 34 33 32 31 30 29 28 27 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 8 7 6 5 4 3 2 1 0) #coreAssociatedBuffer=(35 34 33 32 31 30 29 28 27 18 19 20 21 22 23 24 25 26 9 10 11 12 13 14 15 16 17 8 7 6 5 4 3 2 1 0) @@ -24,13 +24,13 @@ do rm -rf /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log sleep 0.1 - c=`echo $n - 1 | bc` - echo process : $n cores : ${coreAssociatedBuffer[10#${c}]} - taskset -c ${coreAssociatedBuffer[10#${c}]} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py /sf/alvra/data/p18674/raw//RAW_DATA/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/run_info/003000/run_0030${n}.json /gpfs/photonics/swissfel/buffer/config/stream-JF06.json > /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log & +# c=`echo $n - 1 | bc` +# echo process : $n cores : ${coreAssociatedBuffer[10#${c}]} +# taskset -c ${coreAssociatedBuffer[10#${c}]} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py /sf/alvra/data/p18674/raw//RAW_DATA/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/run_info/003000/run_0030${n}.json /gpfs/photonics/swissfel/buffer/config/stream-JF06.json > /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log & -# echo process : $n cores :${coreAssociated} 
-# rm -rf /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log -# taskset -c ${coreAssociated} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py /sf/alvra/data/p18674/raw//RAW_DATA/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/run_info/003000/run_0030${n}.json /gpfs/photonics/swissfel/buffer/config/stream-JF06.json > /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log & + echo process : $n cores :${coreAssociated} + rm -rf /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log + taskset -c ${coreAssociated} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py /sf/alvra/data/p18674/raw//RAW_DATA/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/test_16M/run_0030${n}.JF06T32V02.h5 /sf/alvra/data/p18674/raw/run_info/003000/run_0030${n}.json /gpfs/photonics/swissfel/buffer/config/stream-JF06.json > /sf/alvra/data/p18674/raw/run_info/003000/conversion_0030${n}.log & done echo Submitted From 4077ca933878bbcb2628dc9f00b0e33edb3ee455 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Thu, 8 Oct 2020 09:07:30 +0200 Subject: [PATCH 09/61] adaptation of detector scripts for current conda --- scripts/convert_detector_data.sh | 167 ------------------------------ scripts/retrieve_detector_data.sh | 13 ++- 2 files changed, 9 insertions(+), 171 deletions(-) delete mode 100755 scripts/convert_detector_data.sh diff --git a/scripts/convert_detector_data.sh b/scripts/convert_detector_data.sh deleted file mode 100755 index dd4b38d..0000000 --- a/scripts/convert_detector_data.sh +++ /dev/null @@ -1,167 +0,0 @@ -#!/bin/bash - -if [ $# -lt 3 ] -then - echo "Usage : $0 detector_name start_pulse_id end_pulse_id " - echo "Example : $0 JF07T32V01 11709404000 11709405000 " - echo "Optional parameters: output_file_name rate_multiplicator jf_conversion run_file raw_file" - exit -fi - -DETECTOR=$1 -START_PULSE_ID=$2 -STOP_PULSE_ID=$3 -PULSE_ID_STEP=1 # by 
default assume 100Hz -JF_CONVERSION=0 # by default don't call ju_export -RUN_FILE=None -RAW_FILE=None - -echo "Request to retrieve : $@ " -echo "Started : "`date` -date1=$(date +%s) - -if [ $# -ge 4 ] -then - OUTFILE=$4 -else - OUTFILE=/gpfs/photonics/swissfel/buffer/test.${START_PULSE_ID}-${STOP_PULSE_ID}.h5 -fi - -if [ $# -ge 5 ] -then - PULSE_ID_STEP=$5 -fi - -if [ $# -ge 6 ] -then - JF_CONVERSION=$6 - if [ $# -ge 7 ] - then - RUN_FILE=$7 - fi - if [ $# -eq 8 ] - then - RAW_FILE=$8 - fi -fi - - -case ${DETECTOR} in -'JF01T03V01') - NM=3 - DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF01.json - ;; -'JF02T09V02') - NM=9 - DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF02.json - ;; -'JF06T32V02') - NM=32 - DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF06.json - ;; -'JF06T08V02') - NM=8 - DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF06_4M.json - ;; -'JF07T32V01') - NM=32 - DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF07.json - ;; -'JF13T01V01') - NM=1 - DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF13.json - ;; -*) - NM=1 -esac - -#coreAssociated="7,8,9,10,11,12,13,14" -coreAssociated="9,10,11,12,13,14,15,16,17" - -touch /tmp/detector_retrieve.log - -cd /gpfs/photonics/swissfel/buffer/ - -PREVIOUS_STILL_RUN=0 -while [ ${PREVIOUS_STILL_RUN} == 0 ] -do - sleep 15 # we need to sleep at least to make sure that we don't read from CURRENT file -# ps -fe | grep "bin/sf_writer " | grep -v grep | grep sf_writer > /dev/null -# PREVIOUS_STILL_RUN=$? 
# not found == 1 -# PREVIOUS_STILL_RUN=1 - n=`ps -fe | grep "bin/sf_writer " | grep -v grep | grep sf_writer | wc -l` - if [ ${n} -le 10 ] - then - PREVIOUS_STILL_RUN=1 - fi -done - -date2=$(date +%s) -echo -n "Waited Time : " -echo $((date2-date1)) | awk '{print int($1/60)":"int($1%60)}' -echo "Started actual retrieve : "`date` - -if [ ${JF_CONVERSION} == 0 ] -then - OUTFILE_RAW=${OUTFILE} -else - if [ ${RAW_FILE} != "None" ] - then - OUTFILE_RAW=${RAW_FILE} - D1=`dirname ${OUTFILE_RAW}` - mkdir -p ${D1} - else - RUN_NUMBER=`basename ${RUN_FILE} | awk -F '.' '{print $1}'` - D1=`dirname ${RUN_FILE}` - D2=`dirname ${D1}` - OUTFILE_RAW=${D2}/.raw/${RUN_NUMBER}.${DETECTOR}.h5 - mkdir -p ${D2}/.raw/ - fi -fi - -#taskset -c ${coreAssociated} /usr/bin/sf_writer ${OUTFILE_RAW} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${NM} ${START_PULSE_ID} ${STOP_PULSE_ID} ${PULSE_ID_STEP}>> /tmp/detector_retrieve.log & - -#wait - -coreAssociatedConversion="35,34,33,32,31,30,29,28,27,9,10,11,12,13,14,15,16,17" -#coreAssociatedConversion="35,34,33,32,31,30,29,28,27" -#coreAssociatedConversion="35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18" -#TODO: calculate this number from coreAssociatedConversion -#export NUMBA_NUM_THREADS=18 - -date3=$(date +%s) -echo "Finished : "`date` -echo -n "Retrieve Time : " -echo $((date3-date2)) | awk '{print int($1/60)":"int($1%60)}' - -if [ ${JF_CONVERSION} == 0 ] -then - echo "File is written in raw format, no compression" -else - echo "Will call compression/convertion ${OUTFILE_RAW} --> ${OUTFILE}" - - PREVIOUS_STILL_RUN=0 - while [ ${PREVIOUS_STILL_RUN} == 0 ] - do - sleep $(( $RANDOM % 30 + 1 )) # we need to sleep at least to make sure that we don't read from CURRENT file - n=`ps -fe | grep "scripts/export_file.py " | grep -v grep | grep export | wc -l` - if [ ${n} -le 100 ] - then - PREVIOUS_STILL_RUN=1 - fi - done - date4=$(date +%s) - echo -n "Sleep Time : " - echo $((date4-date3)) | awk '{print int($1/60)":"int($1%60)}' - - export 
PATH=/home/dbe/miniconda3/bin:$PATH - source deactivate >/dev/null 2>&1 - source activate conversion - taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py ${OUTFILE_RAW} ${OUTFILE} ${RUN_FILE} ${DET_CONFIG_FILE} -# python /home/dbe/git/sf_daq_buffer/scripts/make_crystfel_list.py ${OUTFILE} ${RUN_FILE} - date5=$(date +%s) - echo "Finished : "`date` - echo -n "Conversion Time : " - echo $((date5-date4)) | awk '{print int($1/60)":"int($1%60)}' - -fi diff --git a/scripts/retrieve_detector_data.sh b/scripts/retrieve_detector_data.sh index 680112d..d697bea 100755 --- a/scripts/retrieve_detector_data.sh +++ b/scripts/retrieve_detector_data.sh @@ -125,6 +125,7 @@ wait #coreAssociatedConversion="35,34,33,32,31,30,29,28,27" coreAssociatedConversion="35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18" +#coreAssociatedConversion="26,25,24,23,22,21,20,19,18" #TODO: calculate this number from coreAssociatedConversion #export NUMBA_NUM_THREADS=18 @@ -144,7 +145,7 @@ else do sleep 15 # we need to sleep at least to make sure that we don't read from CURRENT file n=`ps -fe | grep "scripts/export_file.py " | grep -v grep | grep export | wc -l` - if [ ${n} -lt 18 ] + if [ ${n} -lt 15 ] then PREVIOUS_STILL_RUN=1 fi @@ -154,10 +155,14 @@ else echo $((date4-date3)) | awk '{print int($1/60)":"int($1%60)}' export PATH=/home/dbe/miniconda3/bin:$PATH - source deactivate >/dev/null 2>&1 - source activate conversion + + source /home/dbe/miniconda3/etc/profile.d/conda.sh + + conda deactivate + conda activate bsread + taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py ${OUTFILE_RAW} ${OUTFILE} ${RUN_FILE} ${DET_CONFIG_FILE} - if [ ${DETECTOR} == "JF06T32V02" ] + if [ ${DETECTOR} == "JF06T32V02" ] || [ ${DETECTOR} == "JF06T08V02" ] then python /home/dbe/git/sf_daq_buffer/scripts/make_crystfel_list.py ${OUTFILE} ${RUN_FILE} ${DETECTOR} fi From 11ffd66fc1efd68d974ac817e1717a59fffb8b32 Mon Sep 17 00:00:00 
2001 From: Dmitry Ozerov Date: Thu, 8 Oct 2020 09:10:54 +0200 Subject: [PATCH 10/61] adaptation of detector scripts for current conda --- scripts/start_detector.sh | 52 -------------------------------------- scripts/test_convertion.sh | 12 ++++++--- 2 files changed, 8 insertions(+), 56 deletions(-) delete mode 100755 scripts/start_detector.sh diff --git a/scripts/start_detector.sh b/scripts/start_detector.sh deleted file mode 100755 index d87b4e9..0000000 --- a/scripts/start_detector.sh +++ /dev/null @@ -1,52 +0,0 @@ -#!/bin/bash - -if [ $# -lt 1 ] -then - echo "Usage : $0 DETECTOR_NAME " - echo " DETECTOR_NAME: JF07 or JF01..." - echo " number_of_cycles : optional, default 100" - exit -fi - -DETECTOR=$1 -case ${DETECTOR} in -'JF01') - D=1 - ;; -'JF02') - D=2 - ;; -'JF06') - D=6 - ;; -'JF07') - D=7 - ;; -'JF13') - D=13 - ;; -*) - echo "Unsupported detector" - exit - ;; -esac - -n_cycles=100 -if [ $# == 2 ] -then - n_cycles=$2 -fi - -export PATH=/home/dbe/miniconda3/bin:$PATH -source deactivate -source activate dia - -sls_detector_put ${D}-timing trigger -sls_detector_put ${D}-cycles ${n_cycles} -sls_detector_put ${D}-exptime 5e-06 -sls_detector_put ${D}-frames 1 -sls_detector_put ${D}-dr 16 -#sls_detector_put ${D}-clearbit to 0x5d 0 # normal mode, not highG0 -sls_detector_put ${D}-status start - -echo "Now start trigger" diff --git a/scripts/test_convertion.sh b/scripts/test_convertion.sh index 382ed20..cce2d27 100755 --- a/scripts/test_convertion.sh +++ b/scripts/test_convertion.sh @@ -1,12 +1,14 @@ #!/bin/bash export PATH=/home/dbe/miniconda3/bin:$PATH -source deactivate >/dev/null 2>&1 -source activate conversion +source /home/dbe/miniconda3/etc/profile.d/conda.sh +conda deactivate +conda activate bsread + #export NUMBA_NUM_THREADS=$1 #OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-PAR-${NUMBA_NUM_THREADS} -OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-NEW.NO-LOAD.4 
+OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-NEW.20-daq2 #coreAssociatedBuffer=(35 34 33 32 31 30 29 28 27 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 8 7 6 5 4 3 2 1 0) #coreAssociatedBuffer=(35 34 33 32 31 30 29 28 27 18 19 20 21 22 23 24 25 26 9 10 11 12 13 14 15 16 17 8 7 6 5 4 3 2 1 0) @@ -14,9 +16,11 @@ OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-NEW.NO-LOAD.4 coreAssociatedBuffer=(35 26 34 25 33 24 32 23 31 22 30 21 29 20 28 19 27 18 17 8 16 7 15 6 14 5 13 4 12 3 11 2 10 1 9) coreAssociated="35,26,34,25,33,24,32,23,31,22,30,21,29,20,28,19,27,18" +#coreAssociated="35,34,33,32,31,30,29,28,27" +#coreAssociated="26,25,24,23,22,21,20,19,18" #for N in 1 3 5 7 9 11 13 15 17 19 21 23 25 27 29 31 33 35 -for N in 1 12 14 16 18 20 2 4 6 8 10 22 24 26 28 30 32 35 +for N in 10 12 14 16 18 20 1 2 4 6 8 22 24 26 28 30 32 35 do for n in `seq -f %02g 1 $N` From 7a115cdbffc133237bd0fc5181ff32429cc25266 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Wed, 14 Oct 2020 13:55:51 +0200 Subject: [PATCH 11/61] adaptation to new conda environment name --- scripts/retrieve_detector_data.sh | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/scripts/retrieve_detector_data.sh b/scripts/retrieve_detector_data.sh index d697bea..dfd8f5f 100755 --- a/scripts/retrieve_detector_data.sh +++ b/scripts/retrieve_detector_data.sh @@ -86,9 +86,6 @@ PREVIOUS_STILL_RUN=0 while [ ${PREVIOUS_STILL_RUN} == 0 ] do sleep 15 # we need to sleep at least to make sure that we don't read from CURRENT file -# ps -fe | grep "bin/sf_writer " | grep -v grep | grep sf_writer > /dev/null -# PREVIOUS_STILL_RUN=$? 
# not found == 1 -# PREVIOUS_STILL_RUN=1 n=`ps -fe | grep "bin/sf_writer " | grep -v grep | grep sf_writer | wc -l` if [ ${n} -lt 9 ] then @@ -119,7 +116,7 @@ else fi fi -taskset -c ${coreAssociated} /usr/bin/sf_writer ${OUTFILE_RAW} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${NM} ${START_PULSE_ID} ${STOP_PULSE_ID} ${PULSE_ID_STEP}>> /tmp/detector_retrieve.log & +taskset -c ${coreAssociated} /usr/local/bin/sf_writer ${OUTFILE_RAW} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${NM} ${START_PULSE_ID} ${STOP_PULSE_ID} ${PULSE_ID_STEP}>> /tmp/detector_retrieve.log & wait @@ -159,7 +156,7 @@ else source /home/dbe/miniconda3/etc/profile.d/conda.sh conda deactivate - conda activate bsread + conda activate sf-daq taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py ${OUTFILE_RAW} ${OUTFILE} ${RUN_FILE} ${DET_CONFIG_FILE} if [ ${DETECTOR} == "JF06T32V02" ] || [ ${DETECTOR} == "JF06T08V02" ] From 41b8d2d0ef9bd0960b0a81146fcc673e94e663a9 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Wed, 14 Oct 2020 15:12:34 +0200 Subject: [PATCH 12/61] for unknown reason bitshuffle starts to ignore settings of number of threads in jungfrau utils. 
Fix from Ivan Usov --- scripts/retrieve_detector_data.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/retrieve_detector_data.sh b/scripts/retrieve_detector_data.sh index dfd8f5f..e67c202 100755 --- a/scripts/retrieve_detector_data.sh +++ b/scripts/retrieve_detector_data.sh @@ -126,6 +126,9 @@ coreAssociatedConversion="35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18" #TODO: calculate this number from coreAssociatedConversion #export NUMBA_NUM_THREADS=18 +#not clear why, but bitshuffle doesn't respect OMP_NUM_THREADS set in jungfrau_utils anymore, thus we set it here +export OMP_NUM_THREADS=1 + date3=$(date +%s) echo "Finished : "`date` echo -n "Retrieve Time : " From 897189e82bf472bf37e5de2bd77cfcb99e8719b2 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Wed, 14 Oct 2020 18:37:35 +0200 Subject: [PATCH 13/61] adapt conversion test script to new environment --- scripts/{test_convertion.sh => test_conversion.sh} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename scripts/{test_convertion.sh => test_conversion.sh} (100%) diff --git a/scripts/test_convertion.sh b/scripts/test_conversion.sh similarity index 100% rename from scripts/test_convertion.sh rename to scripts/test_conversion.sh From 265e7958b54f2ea262a47b7bab556af7d8886325 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Wed, 14 Oct 2020 18:39:39 +0200 Subject: [PATCH 14/61] adapt conversion test script to new environment --- scripts/test_conversion.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/test_conversion.sh b/scripts/test_conversion.sh index cce2d27..18567e8 100755 --- a/scripts/test_conversion.sh +++ b/scripts/test_conversion.sh @@ -3,12 +3,13 @@ export PATH=/home/dbe/miniconda3/bin:$PATH source /home/dbe/miniconda3/etc/profile.d/conda.sh conda deactivate -conda activate bsread +conda activate sf-daq +export OMP_NUM_THREADS=1 #export NUMBA_NUM_THREADS=$1 
#OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-PAR-${NUMBA_NUM_THREADS} -OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-NEW.20-daq2 +OUTDIR=/sf/alvra/data/p18674/raw/run_info/003000/CONVERSION-NEW.21-daq1 #coreAssociatedBuffer=(35 34 33 32 31 30 29 28 27 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 8 7 6 5 4 3 2 1 0) #coreAssociatedBuffer=(35 34 33 32 31 30 29 28 27 18 19 20 21 22 23 24 25 26 9 10 11 12 13 14 15 16 17 8 7 6 5 4 3 2 1 0) From 326c108bda45ae398d794e5045769be05dde24df Mon Sep 17 00:00:00 2001 From: babic_a Date: Tue, 27 Oct 2020 19:51:03 +0100 Subject: [PATCH 15/61] useful messages in case of frame number disagreement --- scripts/JF01-stream.service | 2 +- sf-stream/src/ZmqLiveSender.cpp | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/scripts/JF01-stream.service b/scripts/JF01-stream.service index 79e7b1b..8d2fc35 100644 --- a/scripts/JF01-stream.service +++ b/scripts/JF01-stream.service @@ -8,7 +8,7 @@ User=root ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF01-stream.sh TimeoutStartSec=10 Restart=on-failure -RestartSec=10 +RestartSec=1 [Install] WantedBy=multi-user.target diff --git a/sf-stream/src/ZmqLiveSender.cpp b/sf-stream/src/ZmqLiveSender.cpp index 78cf055..5a2719b 100644 --- a/sf-stream/src/ZmqLiveSender.cpp +++ b/sf-stream/src/ZmqLiveSender.cpp @@ -4,6 +4,8 @@ #include "zmq.h" #include +#include <iostream> +// using namespace std; using namespace stream_config; @@ -98,6 +100,9 @@ void ZmqLiveSender::send(const ModuleFrameBuffer *meta, const char *data) if (module_metadata.n_recv_packets != 128 ) is_good_frame = false; } + if (pulse_id % 10000 == 0 && is_good_frame != true) { + cout << "Frame is not good " << pulse_id << " module : " << i_module << " frame_index(0) : " << frame_index << " frame_index : " << module_metadata.frame_index << endl; + } } // TODO: Here we need to send to streamvis and live analysis metadata(probably need to operate still on them) and data(not every
frame) From 5f161e3c9701d1d213499202f0f6755ed2de7066 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Tue, 27 Oct 2020 19:57:41 +0100 Subject: [PATCH 16/61] update to the cleanup scripts of buffer --- scripts/clean_buffer.cron | 3 +++ scripts/delete_old_files_in_buffer.sh | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 scripts/clean_buffer.cron diff --git a/scripts/clean_buffer.cron b/scripts/clean_buffer.cron new file mode 100644 index 0000000..b90262b --- /dev/null +++ b/scripts/clean_buffer.cron @@ -0,0 +1,3 @@ +#check every hour if buffer is occupied more than 80% and remove all files older than 5 hours +10 * * * * root /home/dbe/git/sf_daq_buffer/scripts/delete_old_files_in_buffer.sh 80 5 + diff --git a/scripts/delete_old_files_in_buffer.sh b/scripts/delete_old_files_in_buffer.sh index ddcfed1..e461708 100755 --- a/scripts/delete_old_files_in_buffer.sh +++ b/scripts/delete_old_files_in_buffer.sh @@ -3,11 +3,11 @@ hours=5 threshold=80 -if [ $# = 1 ] +if [ $# -ge 1 ] then threshold=$1 fi -if [ $# = 2 ] +if [ $# -eq 2 ] then hours=$2 fi From 575437704f2d87648b77cae21968707ab8e87d90 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Tue, 27 Oct 2020 20:00:15 +0100 Subject: [PATCH 17/61] new laser mode 1:3 --- scripts/make_crystfel_list.py | 31 +++++++++++++++++++++++++++++++ scripts/retrieve_detector_data.sh | 2 +- 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/scripts/make_crystfel_list.py b/scripts/make_crystfel_list.py index 8eb16dc..58df55d 100644 --- a/scripts/make_crystfel_list.py +++ b/scripts/make_crystfel_list.py @@ -26,6 +26,9 @@ def is_it_dark(laser_mode, detector_rate, pulseid): dark = True elif laser_mode == 1: dark = False + elif laser_mode == 13: + if (pulseid % int(100/detector_rate*4)) == 0: + dark = False else: if (pulseid + int(100/detector_rate) ) % dark_rate == 0: dark = True @@ -34,6 +37,17 @@ def is_it_dark(laser_mode, detector_rate, pulseid): return dark +def which_dark(laser_mode,
detector_rate, pulseid): + + dark_mode = -1 + if laser_mode != 13: + dark_mode = 0 + else: + for m in range(1,4): + if ((pulseid-m*int(100/detector_rate)) % int(100/detector_rate*4)) == 0: + dark_mode = m + + return dark_mode parser = argparse.ArgumentParser() parser.add_argument("data_file", type=str) @@ -76,6 +90,9 @@ nProcessedFrames = 0 index_dark = [] index_light = [] + +index_dark_mode = {} + for i in range(len(pulseids)): if not is_good_frame[i]: continue @@ -84,6 +101,11 @@ for i in range(len(pulseids)): nProcessedFrames += 1 if is_it_dark(laser_mode, detector_rate, p): index_dark.append(i) + if laser_mode == 13: + dark_mode = which_dark(laser_mode, detector_rate, p) + if dark_mode not in index_dark_mode: + index_dark_mode[dark_mode] = [] + index_dark_mode[dark_mode].append(i) else: index_light.append(i) @@ -112,3 +134,12 @@ if len(index_light) > 0: f_list.close() +for m in index_dark_mode: + if len(index_dark_mode[m]) > 0: + file_dark = f'{data_file[:-3]}.dark{m}.lst' + print(f"List of dark{m} frames : {file_dark} , {len(index_dark_mode[m])} frames") + f_list = open(file_dark, "w") + for frame_number in index_dark_mode[m]: + print(f'{data_file} //{frame_number}', file = f_list) + f_list.close() + diff --git a/scripts/retrieve_detector_data.sh b/scripts/retrieve_detector_data.sh index d697bea..a7cb0b1 100755 --- a/scripts/retrieve_detector_data.sh +++ b/scripts/retrieve_detector_data.sh @@ -61,7 +61,7 @@ case ${DETECTOR} in ;; 'JF06T08V02') NM=8 - DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF06_4M.json + DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF06_4M.daq8.json ;; 'JF07T32V01') NM=32 From fe39841df9132eef4ff9a713f26c31d0ecc15300 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Wed, 4 Nov 2020 09:23:09 +0100 Subject: [PATCH 18/61] adaptation of scripts to run on sf-daq-4 --- scripts/JF02-buffer-worker.sh | 14 +++++++++----- scripts/JF02-buffer-worker@.service | 2 +- scripts/JF02-buffer.service | 2 +- 
scripts/JF02-stream.service | 4 ++-- scripts/JF02-stream.sh | 13 ++++++++++--- scripts/JF02-vis.service | 13 +++++++++++++ scripts/JF02-vis.sh | 28 ++++++++++++++++++++++++++++ scripts/sf_daq_buffer.setup.sh | 20 ++++++++++++++++++++ scripts/streamvis_setup.sh | 28 ++++++++++++++++++++++++++++ 9 files changed, 112 insertions(+), 12 deletions(-) create mode 100644 scripts/JF02-vis.service create mode 100644 scripts/JF02-vis.sh create mode 100755 scripts/sf_daq_buffer.setup.sh create mode 100755 scripts/streamvis_setup.sh diff --git a/scripts/JF02-buffer-worker.sh b/scripts/JF02-buffer-worker.sh index e8f0d2b..9c163c3 100644 --- a/scripts/JF02-buffer-worker.sh +++ b/scripts/JF02-buffer-worker.sh @@ -8,14 +8,18 @@ fi M=$1 -# Add ourselves to the user cpuset. -echo $$ > /sys/fs/cgroup/cpuset/user/tasks +H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` -#coreAssociatedBuffer=(25 25 26 26 27 27 28 28 29) -coreAssociatedBuffer=(1 2 2 3 3 4 4 5 5) +case ${H} in +'sf-daq-4') + coreAssociatedBuffer=(11 12 13 14 15 16 17 18 19) + ;; +*) + CORES=(25 25 26 26 27 27 28 28 29) +esac initialUDPport=50020 port=$((${initialUDPport}+10#${M})) DETECTOR=JF02T09V02 -taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} +taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/local/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} diff --git a/scripts/JF02-buffer-worker@.service b/scripts/JF02-buffer-worker@.service index b175012..895e47e 100644 --- a/scripts/JF02-buffer-worker@.service +++ b/scripts/JF02-buffer-worker@.service @@ -8,7 +8,7 @@ BindsTo=JF02-buffer.service PermissionsStartOnly=true Type=idle User=root -ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF02-buffer-worker.sh %i +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF02-buffer-worker.sh %i TimeoutStartSec=10 RestartSec=10 diff --git a/scripts/JF02-buffer.service b/scripts/JF02-buffer.service index 
a3b442c..4302f4a 100644 --- a/scripts/JF02-buffer.service +++ b/scripts/JF02-buffer.service @@ -3,7 +3,7 @@ Description=All UDP-buffer instances of JF02 [Service] Type=oneshot -ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF02-buffer-worker.sh +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF02-buffer-worker.sh RemainAfterExit=yes [Install] diff --git a/scripts/JF02-stream.service b/scripts/JF02-stream.service index 27aaaa0..b070345 100644 --- a/scripts/JF02-stream.service +++ b/scripts/JF02-stream.service @@ -5,10 +5,10 @@ Description=stream service (to streamvis and live analysis) of JF02 PermissionsStartOnly=true Type=idle User=root -ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF02-stream.sh +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF02-stream.sh TimeoutStartSec=10 Restart=on-failure -RestartSec=10 +RestartSec=2 [Install] WantedBy=multi-user.target diff --git a/scripts/JF02-stream.sh b/scripts/JF02-stream.sh index 38b113d..832eb3c 100644 --- a/scripts/JF02-stream.sh +++ b/scripts/JF02-stream.sh @@ -1,5 +1,12 @@ #!/bin/bash -echo $$ > /sys/fs/cgroup/cpuset/user/tasks -coreAssociated="33,34,35" -taskset -c ${coreAssociated} /usr/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF02.json +H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` +case ${H} in +'sf-daq-4') + coreAssociated="33,34,35" + ;; +*) + coreAssociated="12" +esac + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF02.json diff --git a/scripts/JF02-vis.service b/scripts/JF02-vis.service new file mode 100644 index 0000000..a80fe66 --- /dev/null +++ b/scripts/JF02-vis.service @@ -0,0 +1,13 @@ +[Unit] +Description=streamvis: JF02 + +[Service] +User=root +TimeoutStartSec=2 +ExecStart=/bin/bash ./home/dbe/service_scripts/JF02-vis.sh +Restart=on-failure +RestartSec=4 + +[Install] +WantedBy=multi-user.target + diff --git a/scripts/JF02-vis.sh b/scripts/JF02-vis.sh new file mode 100644 index 0000000..7325b55 --- 
/dev/null +++ b/scripts/JF02-vis.sh @@ -0,0 +1,28 @@ +export PATH=/home/dbe/miniconda3/bin:$PATH + +source /home/dbe/miniconda3/etc/profile.d/conda.sh + +conda deactivate +conda activate vis + +PORT=5002 +PORT_BACKEND=9002 + +H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` +BACKEND=${H} + + +case ${H} in +'sf-daq-4') + CORES='36,37' + ;; +*) + CORES='2' +esac + +taskset -c ${CORES} \ +streamvis alvra --allow-websocket-origin=${H}:${PORT} \ +--allow-websocket-origin=sf-daq-alvra:${PORT} --port=${PORT} \ +--address tcp://${BACKEND}:${PORT_BACKEND} \ +--page-title 4p5M_Alvra + diff --git a/scripts/sf_daq_buffer.setup.sh b/scripts/sf_daq_buffer.setup.sh new file mode 100755 index 0000000..828de0b --- /dev/null +++ b/scripts/sf_daq_buffer.setup.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +# needed, otherwise executing with Ansible won't work +# see: https://github.com/conda/conda/issues/7267 +unset SUDO_UID SUDO_GID SUDO_USER + +if [ ! -d /home/dbe/git ]; then + echo "No git repo found, cloning it..." + mkdir /home/dbe/git +fi + +REPO=sf_daq_buffer +if [ ! -d /home/dbe/git/${REPO} ]; then + cd /home/dbe/git && git clone https://github.com/paulscherrerinstitute/${REPO}.git + + source /opt/rh/devtoolset-9/enable + cd /home/dbe/git/${REPO} && mkdir -p build && cd build/ && cmake3 .. && make +fi + + diff --git a/scripts/streamvis_setup.sh b/scripts/streamvis_setup.sh new file mode 100755 index 0000000..346ad3d --- /dev/null +++ b/scripts/streamvis_setup.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +# needed, otherwise executing with Ansible won't work +# see: https://github.com/conda/conda/issues/7267 +unset SUDO_UID SUDO_GID SUDO_USER + +if [ ! -f /home/dbe/miniconda3/bin/conda ] +then + echo "Getting Miniconda" + wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh + sh Miniconda3-latest-Linux-x86_64.sh -b -p /home/dbe/miniconda3 + + rm -rf Miniconda3-latest-Linux-x86_64.sh +fi + +# Setup the conda environment. 
+export PATH=/home/dbe/miniconda3/bin:$PATH + +source /home/dbe/miniconda3/etc/profile.d/conda.sh + +CONDA_ENV_NAME=vis +envtest=$(conda env list | grep ${CONDA_ENV_NAME}) + +if [ $? != 0 ]; then + echo "Creating the ${CONDA_ENV_NAME} environment" + conda create -n vis -y -c paulscherrerinstitute -c conda-forge streamvis +fi + From 0e001f4daf14e3976c53e6b3fe5361a74bdf581e Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Thu, 5 Nov 2020 21:46:22 +0100 Subject: [PATCH 19/61] standartisation of scripts --- scripts/JF02-buffer-worker.sh | 3 +++ scripts/JF02-buffer-worker@.service | 2 +- scripts/JF02-stream.service | 2 +- scripts/JF02-stream.sh | 7 ++++++- scripts/JF02-vis.sh | 3 +++ scripts/JF06-buffer-worker.sh | 2 +- scripts/JF06-buffer-worker@.service | 4 ++-- scripts/JF06-buffer.service | 2 +- scripts/JF06-stream.service | 2 +- scripts/JF06-stream.sh | 2 +- scripts/JF06-vis.service | 13 +++++++++++++ scripts/JF06-vis.sh | 18 ++++++++++++++++++ scripts/JF06_4M-buffer-worker.sh | 4 ++-- scripts/JF06_4M-buffer-worker@.service | 4 ++-- scripts/JF06_4M-buffer.service | 2 +- scripts/JF06_4M-stream.service | 4 ++-- scripts/JF06_4M-stream.sh | 4 ++-- scripts/JF11-buffer-worker.sh | 17 +++++++++++++++++ scripts/JF11-buffer-worker@.service | 16 ++++++++++++++++ scripts/JF11-buffer.service | 10 ++++++++++ scripts/JF11-stream.service | 15 +++++++++++++++ scripts/JF11-stream.sh | 5 +++++ scripts/JF11-vis.service | 13 +++++++++++++ scripts/JF11-vis.sh | 18 ++++++++++++++++++ 24 files changed, 154 insertions(+), 18 deletions(-) create mode 100644 scripts/JF06-vis.service create mode 100644 scripts/JF06-vis.sh create mode 100644 scripts/JF11-buffer-worker.sh create mode 100644 scripts/JF11-buffer-worker@.service create mode 100644 scripts/JF11-buffer.service create mode 100644 scripts/JF11-stream.service create mode 100644 scripts/JF11-stream.sh create mode 100644 scripts/JF11-vis.service create mode 100644 scripts/JF11-vis.sh diff --git a/scripts/JF02-buffer-worker.sh 
b/scripts/JF02-buffer-worker.sh index 9c163c3..8ab83c8 100644 --- a/scripts/JF02-buffer-worker.sh +++ b/scripts/JF02-buffer-worker.sh @@ -14,6 +14,9 @@ case ${H} in 'sf-daq-4') coreAssociatedBuffer=(11 12 13 14 15 16 17 18 19) ;; +'sf-daq-8') + coreAssociatedBuffer=(11 11 12 12 13 13 1 1 0) + ;; *) CORES=(25 25 26 26 27 27 28 28 29) esac diff --git a/scripts/JF02-buffer-worker@.service b/scripts/JF02-buffer-worker@.service index 895e47e..55031e4 100644 --- a/scripts/JF02-buffer-worker@.service +++ b/scripts/JF02-buffer-worker@.service @@ -10,7 +10,7 @@ Type=idle User=root ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF02-buffer-worker.sh %i TimeoutStartSec=10 -RestartSec=10 +RestartSec=1 [Install] WantedBy=JF02-buffer.service diff --git a/scripts/JF02-stream.service b/scripts/JF02-stream.service index b070345..0cd706e 100644 --- a/scripts/JF02-stream.service +++ b/scripts/JF02-stream.service @@ -8,7 +8,7 @@ User=root ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF02-stream.sh TimeoutStartSec=10 Restart=on-failure -RestartSec=2 +RestartSec=1 [Install] WantedBy=multi-user.target diff --git a/scripts/JF02-stream.sh b/scripts/JF02-stream.sh index 832eb3c..0df4e74 100644 --- a/scripts/JF02-stream.sh +++ b/scripts/JF02-stream.sh @@ -4,9 +4,14 @@ H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` case ${H} in 'sf-daq-4') coreAssociated="33,34,35" + config=/gpfs/photonics/swissfel/buffer/config/stream-JF02.json + ;; +'sf-daq-8') + coreAssociated="14,15,16" + config=/gpfs/photonics/swissfel/buffer/config/stream-JF02.daq8.json ;; *) coreAssociated="12" esac -taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF02.json +taskset -c ${coreAssociated} /usr/local/bin/sf_stream ${config} diff --git a/scripts/JF02-vis.sh b/scripts/JF02-vis.sh index 7325b55..2aa7816 100644 --- a/scripts/JF02-vis.sh +++ b/scripts/JF02-vis.sh @@ -16,6 +16,9 @@ case ${H} in 'sf-daq-4') CORES='36,37' ;; +'sf-daq-8') + CORES='17,18' + ;; *) CORES='2' esac diff 
--git a/scripts/JF06-buffer-worker.sh b/scripts/JF06-buffer-worker.sh index bac2bdb..bf9691b 100644 --- a/scripts/JF06-buffer-worker.sh +++ b/scripts/JF06-buffer-worker.sh @@ -18,4 +18,4 @@ initialUDPport=50060 port=$((${initialUDPport}+10#${M})) DETECTOR=JF06T32V02 -taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} +taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/local/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} diff --git a/scripts/JF06-buffer-worker@.service b/scripts/JF06-buffer-worker@.service index 44ab481..c0380f1 100644 --- a/scripts/JF06-buffer-worker@.service +++ b/scripts/JF06-buffer-worker@.service @@ -8,9 +8,9 @@ BindsTo=JF06-buffer.service PermissionsStartOnly=true Type=idle User=root -ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF06-buffer-worker.sh %i +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF06-buffer-worker.sh %i TimeoutStartSec=10 -RestartSec=10 +RestartSec=1 [Install] WantedBy=JF06-buffer.service diff --git a/scripts/JF06-buffer.service b/scripts/JF06-buffer.service index eba84ca..4195d8e 100644 --- a/scripts/JF06-buffer.service +++ b/scripts/JF06-buffer.service @@ -3,7 +3,7 @@ Description=All UDP-buffer instances of JF06 [Service] Type=oneshot -ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF06-buffer-worker.sh +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF06-buffer-worker.sh RemainAfterExit=yes [Install] diff --git a/scripts/JF06-stream.service b/scripts/JF06-stream.service index 9c5b2c6..d4fdd80 100644 --- a/scripts/JF06-stream.service +++ b/scripts/JF06-stream.service @@ -5,7 +5,7 @@ Description=stream service (to streamvis and live analysis) of JF06 PermissionsStartOnly=true Type=idle User=root -ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF06-stream.sh +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF06-stream.sh TimeoutStartSec=10 
Restart=on-failure RestartSec=1 diff --git a/scripts/JF06-stream.sh b/scripts/JF06-stream.sh index c778845..9a90023 100644 --- a/scripts/JF06-stream.sh +++ b/scripts/JF06-stream.sh @@ -3,4 +3,4 @@ coreAssociated="2,3,4,5" #echo $$ > /sys/fs/cgroup/cpuset/user/tasks -taskset -c ${coreAssociated} /usr/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF06.json +taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF06.daq8.json diff --git a/scripts/JF06-vis.service b/scripts/JF06-vis.service new file mode 100644 index 0000000..ff6b509 --- /dev/null +++ b/scripts/JF06-vis.service @@ -0,0 +1,13 @@ +[Unit] +Description=streamvis: JF06 + +[Service] +User=root +TimeoutStartSec=2 +ExecStart=/bin/bash ./home/dbe/service_scripts/JF06-vis.sh +Restart=on-failure +RestartSec=4 + +[Install] +WantedBy=multi-user.target + diff --git a/scripts/JF06-vis.sh b/scripts/JF06-vis.sh new file mode 100644 index 0000000..829531b --- /dev/null +++ b/scripts/JF06-vis.sh @@ -0,0 +1,18 @@ +export PATH=/home/dbe/miniconda3/bin:$PATH + +source /home/dbe/miniconda3/etc/profile.d/conda.sh + +conda deactivate +conda activate vis + +PORT=5006 +PORT_BACKEND=9006 + +H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` +BACKEND=${H} + +taskset -c 19,20 \ +streamvis default16m --allow-websocket-origin=${H}:${PORT} --allow-websocket-origin=sf-daq-alvra:${PORT} \ +--port=${PORT} --address tcp://${BACKEND}:${PORT_BACKEND} \ +--page-title 16M_Jungfrau_Alvra + diff --git a/scripts/JF06_4M-buffer-worker.sh b/scripts/JF06_4M-buffer-worker.sh index c596d9d..6d0c062 100644 --- a/scripts/JF06_4M-buffer-worker.sh +++ b/scripts/JF06_4M-buffer-worker.sh @@ -11,10 +11,10 @@ M=$1 # Add ourselves to the user cpuset. 
# echo $$ > /sys/fs/cgroup/cpuset/user/tasks -coreAssociatedBuffer=(22 23 24 25 26 27 28 29) +coreAssociatedBuffer=(6 7 8 9 10 22 23 24) initialUDPport=50060 port=$((${initialUDPport}+10#${M})) DETECTOR=JF06T08V02 -taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} +taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/local/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} diff --git a/scripts/JF06_4M-buffer-worker@.service b/scripts/JF06_4M-buffer-worker@.service index f83f2c0..9960018 100644 --- a/scripts/JF06_4M-buffer-worker@.service +++ b/scripts/JF06_4M-buffer-worker@.service @@ -8,9 +8,9 @@ BindsTo=JF06_4M-buffer.service PermissionsStartOnly=true Type=idle User=root -ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF06_4M-buffer-worker.sh %i +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF06_4M-buffer-worker.sh %i TimeoutStartSec=10 -RestartSec=10 +RestartSec=1 [Install] WantedBy=JF06_4M-buffer.service diff --git a/scripts/JF06_4M-buffer.service b/scripts/JF06_4M-buffer.service index a3116e8..41d5610 100644 --- a/scripts/JF06_4M-buffer.service +++ b/scripts/JF06_4M-buffer.service @@ -3,7 +3,7 @@ Description=All UDP-buffer instances of JF06(4M mode) [Service] Type=oneshot -ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF06_4M-buffer-worker.sh +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF06_4M-buffer-worker.sh RemainAfterExit=yes [Install] diff --git a/scripts/JF06_4M-stream.service b/scripts/JF06_4M-stream.service index 4526f8f..bd332ab 100644 --- a/scripts/JF06_4M-stream.service +++ b/scripts/JF06_4M-stream.service @@ -5,10 +5,10 @@ Description=stream service (to streamvis and live analysis) of JF06 (4M mode) PermissionsStartOnly=true Type=idle User=root -ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF06_4M-stream.sh +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF06_4M-stream.sh 
TimeoutStartSec=10 Restart=on-failure -RestartSec=10 +RestartSec=1 [Install] WantedBy=multi-user.target diff --git a/scripts/JF06_4M-stream.sh b/scripts/JF06_4M-stream.sh index c5359b5..21966d4 100644 --- a/scripts/JF06_4M-stream.sh +++ b/scripts/JF06_4M-stream.sh @@ -1,5 +1,5 @@ #!/bin/bash -coreAssociated="13,14,15,16" +coreAssociated="2,3,4,5" -taskset -c ${coreAssociated} /usr/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF06_4M.json +taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF06_4M.daq8.json diff --git a/scripts/JF11-buffer-worker.sh b/scripts/JF11-buffer-worker.sh new file mode 100644 index 0000000..39dccca --- /dev/null +++ b/scripts/JF11-buffer-worker.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +if [ $# != 1 ] +then + systemctl start JF11-buffer-worker@{00..03} + exit +fi + +M=$1 + +coreAssociatedBuffer=(11 12 13 1) + +initialUDPport=50170 +port=$((${initialUDPport}+10#${M})) +DETECTOR=JF11T04V01 + +taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/local/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} diff --git a/scripts/JF11-buffer-worker@.service b/scripts/JF11-buffer-worker@.service new file mode 100644 index 0000000..0c19154 --- /dev/null +++ b/scripts/JF11-buffer-worker@.service @@ -0,0 +1,16 @@ +[Unit] +Description=JF11 UDP2buffer worker instance as a service, instance %i +Requires=JF11-buffer.service +Before=JF11-buffer.service +BindsTo=JF11-buffer.service + +[Service] +PermissionsStartOnly=true +Type=idle +User=root +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF11-buffer-worker.sh %i +TimeoutStartSec=10 +RestartSec=1 + +[Install] +WantedBy=JF11-buffer.service diff --git a/scripts/JF11-buffer.service b/scripts/JF11-buffer.service new file mode 100644 index 0000000..8730947 --- /dev/null +++ b/scripts/JF11-buffer.service @@ -0,0 +1,10 @@ +[Unit] +Description=All UDP-buffer instances of JF11 + +[Service] +Type=oneshot +ExecStart=/usr/bin/sh 
/home/dbe/service_scripts/JF11-buffer-worker.sh +RemainAfterExit=yes + +[Install] +WantedBy=multi-user.target diff --git a/scripts/JF11-stream.service b/scripts/JF11-stream.service new file mode 100644 index 0000000..4743c94 --- /dev/null +++ b/scripts/JF11-stream.service @@ -0,0 +1,15 @@ +[Unit] +Description=stream service (to streamvis and live analysis) of JF11(TXS Flex) + +[Service] +PermissionsStartOnly=true +Type=idle +User=root +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF11-stream.sh +TimeoutStartSec=10 +Restart=on-failure +RestartSec=1 + +[Install] +WantedBy=multi-user.target + diff --git a/scripts/JF11-stream.sh b/scripts/JF11-stream.sh new file mode 100644 index 0000000..e5fb95f --- /dev/null +++ b/scripts/JF11-stream.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +coreAssociated="14,15,16" + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF11.json diff --git a/scripts/JF11-vis.service b/scripts/JF11-vis.service new file mode 100644 index 0000000..82a1698 --- /dev/null +++ b/scripts/JF11-vis.service @@ -0,0 +1,13 @@ +[Unit] +Description=streamvis: JF11 + +[Service] +User=root +TimeoutStartSec=2 +ExecStart=/bin/bash ./home/dbe/service_scripts/JF11-vis.sh +Restart=on-failure +RestartSec=4 + +[Install] +WantedBy=multi-user.target + diff --git a/scripts/JF11-vis.sh b/scripts/JF11-vis.sh new file mode 100644 index 0000000..22b2b14 --- /dev/null +++ b/scripts/JF11-vis.sh @@ -0,0 +1,18 @@ +export PATH=/home/dbe/miniconda3/bin:$PATH + +source /home/dbe/miniconda3/etc/profile.d/conda.sh + +conda deactivate +conda activate vis + +PORT=5011 +PORT_BACKEND=9011 + +H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` +BACKEND=${H} + +taskset -c 17,18 \ +streamvis alvra --allow-websocket-origin=${H}:${PORT} \ +--allow-websocket-origin=sf-daq-alvra:${PORT} --port=${PORT} \ +--address tcp://${BACKEND}:${PORT_BACKEND} \ +--page-title TXS_Flex From fd3c86fb8dbb616283c61b092715cd5a8ad1f467 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: 
Thu, 5 Nov 2020 22:13:18 +0100 Subject: [PATCH 20/61] standartisation of scripts, JF01 --- scripts/JF01-buffer-worker.sh | 5 +---- scripts/JF01-buffer-worker@.service | 2 +- scripts/JF01-buffer.service | 2 +- scripts/JF01-stream.service | 2 +- scripts/JF01-stream.sh | 2 +- scripts/JF01-vis.service | 13 +++++++++++++ scripts/JF01-vis.sh | 19 +++++++++++++++++++ 7 files changed, 37 insertions(+), 8 deletions(-) create mode 100644 scripts/JF01-vis.service create mode 100644 scripts/JF01-vis.sh diff --git a/scripts/JF01-buffer-worker.sh b/scripts/JF01-buffer-worker.sh index b907af5..e9a101e 100644 --- a/scripts/JF01-buffer-worker.sh +++ b/scripts/JF01-buffer-worker.sh @@ -8,13 +8,10 @@ fi M=$1 -# Add ourselves to the user cpuset. -# echo $$ > /sys/fs/cgroup/cpuset/user/tasks - coreAssociatedBuffer=(12 12 12) initialUDPport=50010 port=$((${initialUDPport}+10#${M})) DETECTOR=JF01T03V01 -taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} +taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/local/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} diff --git a/scripts/JF01-buffer-worker@.service b/scripts/JF01-buffer-worker@.service index b8b6a97..afe7806 100644 --- a/scripts/JF01-buffer-worker@.service +++ b/scripts/JF01-buffer-worker@.service @@ -8,7 +8,7 @@ BindsTo=JF01-buffer.service PermissionsStartOnly=true Type=idle User=root -ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF01-buffer-worker.sh %i +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF01-buffer-worker.sh %i TimeoutStartSec=10 RestartSec=10 diff --git a/scripts/JF01-buffer.service b/scripts/JF01-buffer.service index efdc14d..335d8c2 100644 --- a/scripts/JF01-buffer.service +++ b/scripts/JF01-buffer.service @@ -3,7 +3,7 @@ Description=All UDP-buffer instances of JF01 [Service] Type=oneshot -ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF01-buffer-worker.sh 
+ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF01-buffer-worker.sh RemainAfterExit=yes [Install] diff --git a/scripts/JF01-stream.service b/scripts/JF01-stream.service index 8d2fc35..58a7d69 100644 --- a/scripts/JF01-stream.service +++ b/scripts/JF01-stream.service @@ -5,7 +5,7 @@ Description=stream service (to streamvis and live analysis) of JF01 PermissionsStartOnly=true Type=idle User=root -ExecStart=/usr/bin/sh /home/writer/git/sf_daq_buffer/scripts/JF01-stream.sh +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF01-stream.sh TimeoutStartSec=10 Restart=on-failure RestartSec=1 diff --git a/scripts/JF01-stream.sh b/scripts/JF01-stream.sh index c7d1724..85ed846 100644 --- a/scripts/JF01-stream.sh +++ b/scripts/JF01-stream.sh @@ -2,4 +2,4 @@ coreAssociated="24" -taskset -c ${coreAssociated} /usr/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF01.json +taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF01.json diff --git a/scripts/JF01-vis.service b/scripts/JF01-vis.service new file mode 100644 index 0000000..1eb1ac3 --- /dev/null +++ b/scripts/JF01-vis.service @@ -0,0 +1,13 @@ +[Unit] +Description=streamvis: JF01 + +[Service] +User=root +TimeoutStartSec=2 +ExecStart=/bin/bash ./home/dbe/service_scripts/JF01-vis.sh +Restart=on-failure +RestartSec=4 + +[Install] +WantedBy=multi-user.target + diff --git a/scripts/JF01-vis.sh b/scripts/JF01-vis.sh new file mode 100644 index 0000000..d74b688 --- /dev/null +++ b/scripts/JF01-vis.sh @@ -0,0 +1,19 @@ +export PATH=/home/dbe/miniconda3/bin:$PATH + +source /home/dbe/miniconda3/etc/profile.d/conda.sh + +conda deactivate +conda activate vis + +PORT=5001 +PORT_BACKEND=9001 + +H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` +BACKEND=${H} + +taskset -c 17,18 \ +streamvis bernina --allow-websocket-origin=${H}:${PORT} \ +--allow-websocket-origin=sf-daq-bernina:${PORT} --port=${PORT} \ +--address tcp://${BACKEND}:${PORT_BACKEND} \ +--page-title 1p5M + From 
a131f0ca6286d7f7f669ea3539f8ae0da885ccd4 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Mon, 9 Nov 2020 19:34:05 +0100 Subject: [PATCH 21/61] JF11 configuration for retreive --- scripts/retrieve_detector_data.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/scripts/retrieve_detector_data.sh b/scripts/retrieve_detector_data.sh index bb3c5e3..1583a40 100755 --- a/scripts/retrieve_detector_data.sh +++ b/scripts/retrieve_detector_data.sh @@ -71,6 +71,10 @@ case ${DETECTOR} in NM=1 DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF13.json ;; +'JF11T04V01') + NM=4 + DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF11.json + ;; *) NM=1 esac From c64299481ea87b50f89ccaec89af445c6ba2fece Mon Sep 17 00:00:00 2001 From: Ivan Usov Date: Tue, 17 Nov 2020 15:15:36 +0100 Subject: [PATCH 22/61] Adapt export_file.py to use jungfrau_utils/1.3 --- scripts/export_file.py | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/scripts/export_file.py b/scripts/export_file.py index b0deccd..4aa0388 100644 --- a/scripts/export_file.py +++ b/scripts/export_file.py @@ -71,20 +71,12 @@ with ju.File( batch_size=35, ) - pixel_mask = juf.handler.get_pixel_mask(gap_pixels=gap_pixels, geometry=geometry) +# Utility info +with h5py.File(args.file_out, "r") as h5f: + print("daq_rec:", h5f[f"/data/{detector_name}/daq_rec"][0, 0]) -# Postprocessing -with h5py.File(args.file_out, "r+") as h5f: - h5f[f"/data/{detector_name}/pixel_mask"] = np.invert(pixel_mask) - if conversion: - print("daq_rec:", h5f[f"/data/{detector_name}/daq_rec"][0, 0]) - del h5f[f"/data/{detector_name}/daq_rec"] - - frame_index = h5f[f"/data/{detector_name}/frame_index"][:] - print("frame_index range:", (np.min(frame_index), np.max(frame_index))) - del h5f[f"/data/{detector_name}/frame_index"] - - del h5f[f"/data/{detector_name}/is_good_frame"] + frame_index = h5f[f"/data/{detector_name}/frame_index"][:] + print("frame_index range:", (np.min(frame_index), 
np.max(frame_index))) print("input frames:", n_input_frames) print("bad frames:", n_input_frames - n_output_frames) @@ -99,4 +91,3 @@ print("geometry:", geometry) print("gap_pixels:", gap_pixels) print("compression:", compression) print("factor:", factor) - From 5dd111bde35090fbc22dc13a95c58fcda5254a49 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Fri, 20 Nov 2020 11:50:34 +0100 Subject: [PATCH 23/61] changes in jungfrau header package for 5.0.0 slsDetector version --- core-buffer/include/jungfrau.hpp | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/core-buffer/include/jungfrau.hpp b/core-buffer/include/jungfrau.hpp index 8e9e333..99156e6 100644 --- a/core-buffer/include/jungfrau.hpp +++ b/core-buffer/include/jungfrau.hpp @@ -4,16 +4,15 @@ #include #define JUNGFRAU_N_MODULES 32 -#define JUNGFRAU_BYTES_PER_PACKET 8246 +#define JUNGFRAU_BYTES_PER_PACKET 8240 #define JUNGFRAU_DATA_BYTES_PER_PACKET 8192 #define JF_N_PACKETS_PER_FRAME 128 #define JUNGFRAU_DATA_BYTES_PER_FRAME 1048576 -// 6 bytes + 48 bytes + 8192 bytes = 8246 bytes +// 48 bytes + 8192 bytes = 8240 bytes #pragma pack(push) #pragma pack(2) struct jungfrau_packet { - char emptyheader[6]; uint64_t framenum; uint32_t exptime; uint32_t packetnum; @@ -35,4 +34,4 @@ struct jungfrau_packet { #pragma pack(pop) -#endif \ No newline at end of file +#endif From 78e31e0ef63d0dc30e2ae57089c050ad515445fd Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Tue, 1 Dec 2020 22:04:38 +0100 Subject: [PATCH 24/61] automatically convert pedestal files --- scripts/jungfrau_create_pedestals.py | 225 +++++++++++++++++++++++++++ scripts/retrieve_detector_data.sh | 57 +++++++ 2 files changed, 282 insertions(+) create mode 100644 scripts/jungfrau_create_pedestals.py diff --git a/scripts/jungfrau_create_pedestals.py b/scripts/jungfrau_create_pedestals.py new file mode 100644 index 0000000..1dcb9e4 --- /dev/null +++ b/scripts/jungfrau_create_pedestals.py @@ -0,0 +1,225 @@ +import argparse +import sys 
+import os +import numpy as np +import h5py +import logging + +ch = logging.StreamHandler() +ch.setFormatter(logging.Formatter('[%(levelname)s] %(message)s')) + +log = logging.getLogger("create_pedestals") +log.addHandler(ch) + + +def h5_printname(name): + print(" {}".format(name)) + + +def forcedGainValue(i, n0, n1, n2, n3): + if i <= n0 - 1: + return 0 + if i <= (n0 + n1) - 1: + return 1 + if i <= (n0 + n1 + n2) - 1: + return 3 + if i <= (n0 + n1 + n2 + n3) - 1: + return 4 + return 2 + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("--verbosity", default=None, help="log verbosity level INFO/DEBUG/WARN/ERROR/CRITICAL") + parser.add_argument("--filename", default="pedestal.h5", help="pedestal file") + parser.add_argument("--X_test_pixel", type=int, default=0, help="x position of the test pixel") + parser.add_argument("--Y_test_pixel", type=int, default=0, help="y position of the test pixel") + parser.add_argument("--nFramesPede", type=int, default=1000, help="number of pedestal frames to average pedestal value") + parser.add_argument("--frames_G0", type=int, default=0, help="force to treat pedestal run as first frames_G0 taken in gain0, then frames_G1 in gain1, and frames_G2 in gain2 and HG0") + parser.add_argument("--frames_G1", type=int, default=0, help="force to treat pedestal run as first frames_G0 taken in gain0, then frames_G1 in gain1, and frames_G2 in gain2 and HG0") + parser.add_argument("--frames_G2", type=int, default=0, help="force to treat pedestal run as first frames_G0 taken in gain0, then frames_G1 in gain1, and frames_G2 in gain2 and HG0") + parser.add_argument("--frames_HG0", type=int, default=0, help="force to treat pedestal run as first frames_G0 taken in gain0, then frames_G1 in gain1, and frames_G2 in gain2 and HG0") + parser.add_argument("--number_frames", type=int, default=1000000, help="analyze only first number_frames frames") + parser.add_argument("--frames_average", type=int, default=1000, help="for pedestal in 
each gain average over last frames_average frames, reducing weight of previous") + parser.add_argument("--directory", default="./", help="Output directory where to store pixelmask and gain file") + parser.add_argument("--gain_check", type=int, default=1, help="check that gain setting in each of the module corresponds to the general gain switch, (0 - dont check)") + parser.add_argument("--add_pixel_mask", default=None, help="add additional masked pixels from external, specified file") + parser.add_argument("--number_bad_modules", type=int, default=0, help="Number of bad modules in detector") + args = parser.parse_args() + + if not (os.path.isfile(args.filename) and os.access(args.filename, os.R_OK)): + print("Pedestal file {} not found, exit".format(args.filename)) + exit() + + if args.verbosity: + log.setLevel(getattr(logging, args.verbosity.upper(), None)) + + overwriteGain = False + if (args.frames_G0 + args.frames_G1 + args.frames_G2) > 0: + log.info("Treat this run as taken with {} frames in gain0, then {} frames in gain1 and {} frames in gain2".format(args.frames_G0, args.frames_G1, args.frames_G2)) + overwriteGain = True + + f = h5py.File(args.filename, "r") + + #detector_name = (f.get("general/detector_name").value).decode('UTF-8') + detector_name = (f.get("general/detector_name")[()]).decode('UTF-8') + #n_bad_modules = f.get("general/n_bad_modules").value + n_bad_modules = args.number_bad_modules + + data_location = "data/" + detector_name + "/data" + daq_recs_location = "data/" + detector_name + "/daq_rec" + is_good_frame_location = "data/" + detector_name + "/is_good_frame" + + + numberOfFrames = len(f[data_location]) + (sh_y, sh_x) = f[data_location][0].shape + nModules = (sh_x * sh_y) // (1024 * 512) + if (nModules * 1024 * 512) != (sh_x * sh_y): + log.error(" {} : Something very strange in the data, Jungfrau consists of (1024x512) modules, while data has {}x{}".format(detector_name, sh_x, sh_y)) + exit() + + (tX, tY) = (args.X_test_pixel, 
args.Y_test_pixel) + if tX < 0 or tX > (sh_x - 1): + tX = 0 + if tY < 0 or tY > (sh_y - 1): + tY = 0 + + log.debug(" {} : test pixel is ( x y ): {}x{}".format(detector_name, tX, tY)) + log.info(" {} : In pedestal file {} there are {} frames".format(detector_name, args.filename, numberOfFrames + 1)) +# log.debug("Following groups are available:") +# if args.verbosity >= 3: +# f.visit(h5_printname) + log.debug(" {} : data has the following shape: {}, type: {}, {} modules ({} bad modules)".format(detector_name, f[data_location][0].shape, f[data_location][0].dtype, nModules, n_bad_modules)) + + pixelMask = np.zeros((sh_y, sh_x), dtype=int) + + adcValuesN = np.zeros((5, sh_y, sh_x)) + adcValuesNN = np.zeros((5, sh_y, sh_x)) + + + averagePedestalFrames = args.frames_average + + nMgain = [0] * 5 + + gainCheck = -1 + highG0Check = 0 + printFalseGain = False + nGoodFrames = 0 + nGoodFramesGain = 0 + + analyzeFrames = min(numberOfFrames, args.number_frames) + + for n in range(analyzeFrames): + + if not f[is_good_frame_location][n]: + continue + + nGoodFrames += 1 + + daq_rec = (f[daq_recs_location][n])[0] + + image = f[data_location][n][:] + frameData = (np.bitwise_and(image, 0b0011111111111111)) + gainData = np.bitwise_and(image, 0b1100000000000000) >> 14 + trueGain = forcedGainValue(n, args.frames_G0, args.frames_G1, args.frames_G2, args.frames_HG0) if overwriteGain else ( (daq_rec & 0b11000000000000) >> 12 ) + highG0 = (daq_rec & 0b1) + + gainGoodAllModules = True + if args.gain_check > 0: + daq_recs = f[daq_recs_location][n] + for i in range(len(daq_recs)): + if trueGain != ((daq_recs[i] & 0b11000000000000) >> 12) or highG0 != (daq_recs[i] & 0b1): + gainGoodAllModules = False + + if highG0 == 1 and trueGain != 0: + gainGoodAllModules = False + log.info(" {} : Jungfrau is in the high G0 mode ({}), but gain settings is strange: {}".format( detector_name, highG0, trueGain)) + + nFramesGain = np.sum(gainData==(trueGain)) + if nFramesGain < (nModules - 0.5 - n_bad_modules) *
(1024 * 512): # make sure that most are the modules are in correct gain + gainGoodAllModules = False + log.debug(" {} : Too many bad pixels, skip the frame {}, true gain: {}(highG0: {}) ({}); gain0 : {}; gain1 : {}; gain2 : {}; undefined gain : {}".format( detector_name, n, trueGain, highG0, nFramesGain, np.sum(gainData==0), np.sum(gainData==1), np.sum(gainData==3), np.sum(gainData==2))) + + if not gainGoodAllModules: + log.debug(" {} : In Frame Number {} : mismatch in modules and general settings, Gain: {} vs {}; HighG0: {} vs {} (or too many bad pixels)".format( detector_name, n, trueGain, ((daq_recs & 0b11000000000000) >> 12), highG0, (daq_recs & 0b1))) + continue + nGoodFramesGain += 1 + + if gainData[tY][tX] != trueGain: + if not printFalseGain: + log.info(" {} : Gain wrong for channel ({}x{}) should be {}, but {}. Frame {}. {} {}".format( detector_name, tX, tY, trueGain, gainData[tY][tX], n, trueGain, daq_rec)) + printFalseGain = True + else: + if gainCheck != -1 and printFalseGain: + log.info(" {} : Gain was wrong for channel ({}x{}) in previous frames, but now correct : {}. 
Frame {}.".format( detector_name, tX, tY, gainData[tY, tX], n)) + printFalseGain = False + + if gainData[tY][tX] != gainCheck or highG0Check != highG0: + log.info(" {} : Gain changed for ({}x{}) channel {} -> {} (highG0 setting: {} -> {}), frame number {}, match: {}".format( detector_name, tX, tY, gainCheck, gainData[tY][tX], highG0Check, highG0, n, gainData[tY][tX] == trueGain)) + gainCheck = gainData[tY][tX] + highG0Check = highG0 + + if gainGoodAllModules: + + pixelMask[gainData != trueGain] |= (1 << (trueGain+4*highG0)) + + trueGain += 4 * highG0 + + + nMgain[trueGain] += 1 + + if nMgain[trueGain] > averagePedestalFrames: + adcValuesN[trueGain] -= adcValuesN[trueGain] / averagePedestalFrames + adcValuesNN[trueGain] -= adcValuesNN[trueGain] / averagePedestalFrames + + adcValuesN[trueGain] += frameData + adcValuesNN[trueGain] += np.float_power(frameData, 2) + + + log.info(" {} : {} frames analyzed, {} good frames, {} frames without settings mismatch. Gain frames distribution (0,1,2,3,HG0) : ({})".format( detector_name, analyzeFrames, nGoodFrames, nGoodFramesGain, nMgain)) + + if args.add_pixel_mask != None: + if (os.path.isfile(args.add_pixel_mask) and os.access(args.add_pixel_mask, os.R_OK)): + additional_pixel_mask_file = h5py.File(args.add_pixel_mask, "r") + additional_pixel_mask = np.array(additional_pixel_mask_file["pixel_mask"]) + log.info("Will add additional masked pixels from file %s , number %d " % (args.add_pixel_mask, np.sum(additional_pixel_mask == 1))) + if additional_pixel_mask.shape == pixelMask.shape: + pixelMask[additional_pixel_mask == 1] |= (1 << 5) + else: + log.error(" shape of additional pixel mask ({}) doesn't match current ({})".format( additional_pixel_mask.shape, pixelMask.shape)) + else: + log.error(" Specified addition file with pixel mask not found or not reachable {}".format( args.add_pixel_mask)) + + fileNameIn = os.path.splitext(os.path.basename(args.filename))[0] + full_fileNameOut = args.directory + "/" + fileNameIn + ".res.h5" 
+ log.info(" {} : Output file with pedestal corrections in: {}".format( detector_name, full_fileNameOut)) + outFile = h5py.File(full_fileNameOut, "w") + + gains = [None] * 4 + gainsRMS = [None] * 4 + + for gain in range(5): + numberFramesAverage = max(1, min(averagePedestalFrames, nMgain[gain])) + mean = adcValuesN[gain] / float(numberFramesAverage) + mean2 = adcValuesNN[gain] / float(numberFramesAverage) + variance = mean2 - np.float_power(mean, 2) + stdDeviation = np.sqrt(variance) + log.debug(" {} : gain {} values results (pixel ({},{}) : {} {}".format( detector_name, gain, tY, tX, mean[tY][tX], stdDeviation[tY][tX])) + if gain != 2: + g = gain if gain < 3 else (gain-1) + gains[g] = mean + gainsRMS[g] = stdDeviation + + pixelMask[np.isclose(stdDeviation,0)] |= (1 << (6 + g)) + + dset = outFile.create_dataset('pixel_mask', data=pixelMask) + dset = outFile.create_dataset('gains', data=gains) + dset = outFile.create_dataset('gainsRMS', data=gainsRMS) + + outFile.close() + + log.info(" {} : Number of good pixels: {} from {} in total ({} bad pixels)".format( detector_name, np.sum(pixelMask == 0), sh_x * sh_y, (sh_x * sh_y - np.sum(pixelMask == 0)))) + + +if __name__ == "__main__": + main() diff --git a/scripts/retrieve_detector_data.sh b/scripts/retrieve_detector_data.sh index 1583a40..f1c7822 100755 --- a/scripts/retrieve_detector_data.sh +++ b/scripts/retrieve_detector_data.sh @@ -55,6 +55,10 @@ case ${DETECTOR} in NM=9 DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF02.json ;; +'JF04T01V01') + NM=1 + DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF04.json + ;; 'JF06T32V02') NM=32 DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF06.json @@ -67,6 +71,14 @@ case ${DETECTOR} in NM=32 DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF07.json ;; +'JF09T01V01') + NM=1 + DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF09.json + ;; +'JF10T01V01') + NM=1 + 
DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF10.json + ;; 'JF13T01V01') NM=1 DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF13.json @@ -141,6 +153,51 @@ echo $((date3-date2)) | awk '{print int($1/60)":"int($1%60)}' if [ ${JF_CONVERSION} == 0 ] then echo "File is written in raw format, no compression" + + dir_name=`dirname ${OUTFILE_RAW}` + base_name=`basename ${dir_name}` + + if [ ${base_name} == "JF_pedestals" ] + then + echo "Pedestal run will make conversion" + + export PATH=/home/dbe/miniconda3/bin:$PATH + + source /home/dbe/miniconda3/etc/profile.d/conda.sh + + conda deactivate + conda activate sf-daq + + if [ ${DETECTOR} == "JF07T32V01" ] + then + time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/bernina/config/jungfrau/pixel_mask/JF07T32V01/pixel_mask_13_full.h5 + elif [ ${DETECTOR} == "JF03T01V02" ] + then + time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/bernina/config/jungfrau/pixel_mask/JF03T01V02/pixel_mask_half_chip.h5 + elif [ ${DETECTOR} == "JF02T09V02" ] + then + time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=1 + elif [ ${DETECTOR} == "JF06T08V02" ] + then + time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/alvra/config/jungfrau/pixel_mask/JF06T08V01/mask_2lines_module3.h5 +# elif [ ${DETECTOR} == "JF06T32V02" ] +# then +# time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/alvra/config/jungfrau/pixel_mask/JF06T32V02/mask_noise_in_28.h5 + 
elif [ ${DETECTOR} == "JF13T01V01" ] + then + time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/bernina/config/jungfrau/pixel_mask/JF13T01V01/pixel_mask_bad_rb_22.09.2020.h5 + elif [ ${DETECTOR} == "JF11T04V01" ] + then + time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=2 + elif [ ${DETECTOR} == "JF11T04V01" ] + then + time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=1 + else + time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG + fi + + fi + else echo "Will call compression/convertion ${OUTFILE_RAW} --> ${OUTFILE}" From c86f6307af86a4e053bf8475799ae1f78e3bce07 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Tue, 1 Dec 2020 22:12:14 +0100 Subject: [PATCH 25/61] added JF04/JF09/JF10 detectors --- scripts/JF04-buffer-worker.sh | 17 +++++++++++++++++ scripts/JF04-buffer-worker@.service | 16 ++++++++++++++++ scripts/JF04-buffer.service | 10 ++++++++++ scripts/JF04-stream.service | 15 +++++++++++++++ scripts/JF04-stream.sh | 5 +++++ scripts/JF04-vis.service | 13 +++++++++++++ scripts/JF04-vis.sh | 19 +++++++++++++++++++ scripts/JF09-buffer-worker.sh | 17 +++++++++++++++++ scripts/JF09-buffer-worker@.service | 16 ++++++++++++++++ scripts/JF09-buffer.service | 10 ++++++++++ scripts/JF09-stream.service | 15 +++++++++++++++ scripts/JF09-stream.sh | 5 +++++ scripts/JF09-vis.service | 13 +++++++++++++ scripts/JF09-vis.sh | 19 +++++++++++++++++++ scripts/JF10-buffer-worker.sh | 17 +++++++++++++++++ scripts/JF10-buffer-worker@.service | 16 ++++++++++++++++ scripts/JF10-buffer.service | 10 ++++++++++ scripts/JF10-stream.service | 15 
+++++++++++++++ scripts/JF10-stream.sh | 5 +++++ scripts/JF10-vis.service | 13 +++++++++++++ scripts/JF10-vis.sh | 19 +++++++++++++++++++ 21 files changed, 285 insertions(+) create mode 100644 scripts/JF04-buffer-worker.sh create mode 100644 scripts/JF04-buffer-worker@.service create mode 100644 scripts/JF04-buffer.service create mode 100644 scripts/JF04-stream.service create mode 100644 scripts/JF04-stream.sh create mode 100644 scripts/JF04-vis.service create mode 100644 scripts/JF04-vis.sh create mode 100644 scripts/JF09-buffer-worker.sh create mode 100644 scripts/JF09-buffer-worker@.service create mode 100644 scripts/JF09-buffer.service create mode 100644 scripts/JF09-stream.service create mode 100644 scripts/JF09-stream.sh create mode 100644 scripts/JF09-vis.service create mode 100644 scripts/JF09-vis.sh create mode 100644 scripts/JF10-buffer-worker.sh create mode 100644 scripts/JF10-buffer-worker@.service create mode 100644 scripts/JF10-buffer.service create mode 100644 scripts/JF10-stream.service create mode 100644 scripts/JF10-stream.sh create mode 100644 scripts/JF10-vis.service create mode 100644 scripts/JF10-vis.sh diff --git a/scripts/JF04-buffer-worker.sh b/scripts/JF04-buffer-worker.sh new file mode 100644 index 0000000..c479a3d --- /dev/null +++ b/scripts/JF04-buffer-worker.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +if [ $# != 1 ] +then + systemctl start JF04-buffer-worker@00 + exit +fi + +M=$1 + +coreAssociatedBuffer=(33) + +initialUDPport=50040 +port=$((${initialUDPport}+10#${M})) +DETECTOR=JF04T01V01 + +taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/local/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} diff --git a/scripts/JF04-buffer-worker@.service b/scripts/JF04-buffer-worker@.service new file mode 100644 index 0000000..b913054 --- /dev/null +++ b/scripts/JF04-buffer-worker@.service @@ -0,0 +1,16 @@ +[Unit] +Description=JF04 UDP2buffer worker instance as a service, instance %i +Requires=JF04-buffer.service 
+Before=JF04-buffer.service +BindsTo=JF04-buffer.service + +[Service] +PermissionsStartOnly=true +Type=idle +User=root +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF04-buffer-worker.sh %i +TimeoutStartSec=10 +RestartSec=1 + +[Install] +WantedBy=JF04-buffer.service diff --git a/scripts/JF04-buffer.service b/scripts/JF04-buffer.service new file mode 100644 index 0000000..6bfdadb --- /dev/null +++ b/scripts/JF04-buffer.service @@ -0,0 +1,10 @@ +[Unit] +Description=All UDP-buffer instances of JF04 + +[Service] +Type=oneshot +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF04-buffer-worker.sh +RemainAfterExit=yes + +[Install] +WantedBy=multi-user.target diff --git a/scripts/JF04-stream.service b/scripts/JF04-stream.service new file mode 100644 index 0000000..8ffd01a --- /dev/null +++ b/scripts/JF04-stream.service @@ -0,0 +1,15 @@ +[Unit] +Description=stream service (to streamvis and live analysis) of JF04 + +[Service] +PermissionsStartOnly=true +Type=idle +User=root +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF04-stream.sh +TimeoutStartSec=10 +Restart=on-failure +RestartSec=1 + +[Install] +WantedBy=multi-user.target + diff --git a/scripts/JF04-stream.sh b/scripts/JF04-stream.sh new file mode 100644 index 0000000..d38ce38 --- /dev/null +++ b/scripts/JF04-stream.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +coreAssociated="36" + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF04.json diff --git a/scripts/JF04-vis.service b/scripts/JF04-vis.service new file mode 100644 index 0000000..78ae2bb --- /dev/null +++ b/scripts/JF04-vis.service @@ -0,0 +1,13 @@ +[Unit] +Description=streamvis: JF04 + +[Service] +User=root +TimeoutStartSec=2 +ExecStart=/bin/bash ./home/dbe/service_scripts/JF04-vis.sh +Restart=on-failure +RestartSec=4 + +[Install] +WantedBy=multi-user.target + diff --git a/scripts/JF04-vis.sh b/scripts/JF04-vis.sh new file mode 100644 index 0000000..4fd87a0 --- /dev/null +++ b/scripts/JF04-vis.sh @@ -0,0 +1,19 
@@ +export PATH=/home/dbe/miniconda3/bin:$PATH + +source /home/dbe/miniconda3/etc/profile.d/conda.sh + +conda deactivate +conda activate vis + +PORT=5004 +PORT_BACKEND=9004 + +H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` +BACKEND=${H} + +taskset -c 16 \ +streamvis bernina --allow-websocket-origin=${H}:${PORT} \ +--allow-websocket-origin=sf-daq-bernina:${PORT} --port=${PORT} \ +--address tcp://${BACKEND}:${PORT_BACKEND} \ +--page-title Fluorescence + diff --git a/scripts/JF09-buffer-worker.sh b/scripts/JF09-buffer-worker.sh new file mode 100644 index 0000000..6056cc6 --- /dev/null +++ b/scripts/JF09-buffer-worker.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +if [ $# != 1 ] +then + systemctl start JF09-buffer-worker@00 + exit +fi + +M=$1 + +coreAssociatedBuffer=(34) + +initialUDPport=50150 +port=$((${initialUDPport}+10#${M})) +DETECTOR=JF09T01V01 + +taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/local/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} diff --git a/scripts/JF09-buffer-worker@.service b/scripts/JF09-buffer-worker@.service new file mode 100644 index 0000000..b199c34 --- /dev/null +++ b/scripts/JF09-buffer-worker@.service @@ -0,0 +1,16 @@ +[Unit] +Description=JF09 UDP2buffer worker instance as a service, instance %i +Requires=JF09-buffer.service +Before=JF09-buffer.service +BindsTo=JF09-buffer.service + +[Service] +PermissionsStartOnly=true +Type=idle +User=root +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF09-buffer-worker.sh %i +TimeoutStartSec=10 +RestartSec=1 + +[Install] +WantedBy=JF09-buffer.service diff --git a/scripts/JF09-buffer.service b/scripts/JF09-buffer.service new file mode 100644 index 0000000..1423a8f --- /dev/null +++ b/scripts/JF09-buffer.service @@ -0,0 +1,10 @@ +[Unit] +Description=All UDP-buffer instances of JF09 + +[Service] +Type=oneshot +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF09-buffer-worker.sh +RemainAfterExit=yes + +[Install] +WantedBy=multi-user.target diff --git 
a/scripts/JF09-stream.service b/scripts/JF09-stream.service new file mode 100644 index 0000000..3dc6548 --- /dev/null +++ b/scripts/JF09-stream.service @@ -0,0 +1,15 @@ +[Unit] +Description=stream service (to streamvis and live analysis) of JF09 + +[Service] +PermissionsStartOnly=true +Type=idle +User=root +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF09-stream.sh +TimeoutStartSec=10 +Restart=on-failure +RestartSec=1 + +[Install] +WantedBy=multi-user.target + diff --git a/scripts/JF09-stream.sh b/scripts/JF09-stream.sh new file mode 100644 index 0000000..f0b5a33 --- /dev/null +++ b/scripts/JF09-stream.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +coreAssociated="37" + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF09.json diff --git a/scripts/JF09-vis.service b/scripts/JF09-vis.service new file mode 100644 index 0000000..ed7815e --- /dev/null +++ b/scripts/JF09-vis.service @@ -0,0 +1,13 @@ +[Unit] +Description=streamvis: JF09 + +[Service] +User=root +TimeoutStartSec=2 +ExecStart=/bin/bash ./home/dbe/service_scripts/JF09-vis.sh +Restart=on-failure +RestartSec=4 + +[Install] +WantedBy=multi-user.target + diff --git a/scripts/JF09-vis.sh b/scripts/JF09-vis.sh new file mode 100644 index 0000000..b8e2b24 --- /dev/null +++ b/scripts/JF09-vis.sh @@ -0,0 +1,19 @@ +export PATH=/home/dbe/miniconda3/bin:$PATH + +source /home/dbe/miniconda3/etc/profile.d/conda.sh + +conda deactivate +conda activate vis + +PORT=5009 +PORT_BACKEND=9009 + +H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` +BACKEND=${H} + +taskset -c 17 \ +streamvis alvra --allow-websocket-origin=${H}:${PORT} \ +--allow-websocket-origin=sf-daq-alvra:${PORT} --port=${PORT} \ +--address tcp://${BACKEND}:${PORT_BACKEND} \ +--page-title FLEX:Normal + diff --git a/scripts/JF10-buffer-worker.sh b/scripts/JF10-buffer-worker.sh new file mode 100644 index 0000000..7f57f56 --- /dev/null +++ b/scripts/JF10-buffer-worker.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +if [ $# != 1 ] +then + 
systemctl start JF10-buffer-worker@00 + exit +fi + +M=$1 + +coreAssociatedBuffer=(35) + +initialUDPport=50160 +port=$((${initialUDPport}+10#${M})) +DETECTOR=JF10T01V01 + +taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/local/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} diff --git a/scripts/JF10-buffer-worker@.service b/scripts/JF10-buffer-worker@.service new file mode 100644 index 0000000..508c60e --- /dev/null +++ b/scripts/JF10-buffer-worker@.service @@ -0,0 +1,16 @@ +[Unit] +Description=JF10 UDP2buffer worker instance as a service, instance %i +Requires=JF10-buffer.service +Before=JF10-buffer.service +BindsTo=JF10-buffer.service + +[Service] +PermissionsStartOnly=true +Type=idle +User=root +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF10-buffer-worker.sh %i +TimeoutStartSec=10 +RestartSec=1 + +[Install] +WantedBy=JF10-buffer.service diff --git a/scripts/JF10-buffer.service b/scripts/JF10-buffer.service new file mode 100644 index 0000000..5a820de --- /dev/null +++ b/scripts/JF10-buffer.service @@ -0,0 +1,10 @@ +[Unit] +Description=All UDP-buffer instances of JF10 + +[Service] +Type=oneshot +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF10-buffer-worker.sh +RemainAfterExit=yes + +[Install] +WantedBy=multi-user.target diff --git a/scripts/JF10-stream.service b/scripts/JF10-stream.service new file mode 100644 index 0000000..786222f --- /dev/null +++ b/scripts/JF10-stream.service @@ -0,0 +1,15 @@ +[Unit] +Description=stream service (to streamvis and live analysis) of JF10 + +[Service] +PermissionsStartOnly=true +Type=idle +User=root +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF10-stream.sh +TimeoutStartSec=10 +Restart=on-failure +RestartSec=1 + +[Install] +WantedBy=multi-user.target + diff --git a/scripts/JF10-stream.sh b/scripts/JF10-stream.sh new file mode 100644 index 0000000..507e28b --- /dev/null +++ b/scripts/JF10-stream.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +coreAssociated="38" + +taskset -c 
${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF10.json diff --git a/scripts/JF10-vis.service b/scripts/JF10-vis.service new file mode 100644 index 0000000..b08cb69 --- /dev/null +++ b/scripts/JF10-vis.service @@ -0,0 +1,13 @@ +[Unit] +Description=streamvis: JF10 + +[Service] +User=root +TimeoutStartSec=2 +ExecStart=/bin/bash ./home/dbe/service_scripts/JF10-vis.sh +Restart=on-failure +RestartSec=4 + +[Install] +WantedBy=multi-user.target + diff --git a/scripts/JF10-vis.sh b/scripts/JF10-vis.sh new file mode 100644 index 0000000..f7f6a9a --- /dev/null +++ b/scripts/JF10-vis.sh @@ -0,0 +1,19 @@ +export PATH=/home/dbe/miniconda3/bin:$PATH + +source /home/dbe/miniconda3/etc/profile.d/conda.sh + +conda deactivate +conda activate vis + +PORT=5010 +PORT_BACKEND=9010 + +H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` +BACKEND=${H} + +taskset -c 18 \ +streamvis alvra --allow-websocket-origin=${H}:${PORT} \ +--allow-websocket-origin=sf-daq-alvra:${PORT} --port=${PORT} \ +--address tcp://${BACKEND}:${PORT_BACKEND} \ +--page-title FLEX:Stripsel + From 837f34ecc8d57925eccf9c9949493642667d5786 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Wed, 2 Dec 2020 11:43:07 +0100 Subject: [PATCH 26/61] proper treatment of JF10(stripsel) detector in conversion by sf_daq procedure --- scripts/retrieve_detector_data.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/retrieve_detector_data.sh b/scripts/retrieve_detector_data.sh index f1c7822..b1243cf 100755 --- a/scripts/retrieve_detector_data.sh +++ b/scripts/retrieve_detector_data.sh @@ -189,7 +189,7 @@ then elif [ ${DETECTOR} == "JF11T04V01" ] then time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=2 - elif [ ${DETECTOR} == "JF11T04V01" ] + elif [ ${DETECTOR} == "JF10T04V01" ] then time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py 
--filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=1 else From f46acddb61a766bb7c83c7f76de8272c1701b316 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Wed, 2 Dec 2020 11:56:20 +0100 Subject: [PATCH 27/61] typo --- scripts/retrieve_detector_data.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/retrieve_detector_data.sh b/scripts/retrieve_detector_data.sh index b1243cf..795daf8 100755 --- a/scripts/retrieve_detector_data.sh +++ b/scripts/retrieve_detector_data.sh @@ -189,7 +189,7 @@ then elif [ ${DETECTOR} == "JF11T04V01" ] then time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=2 - elif [ ${DETECTOR} == "JF10T04V01" ] + elif [ ${DETECTOR} == "JF10T01V01" ] then time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=1 else From 344f931f53aa98bf135f7d9a76a3e69e2b85962b Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Fri, 4 Dec 2020 18:13:20 +0100 Subject: [PATCH 28/61] jail conversion of pedestal to the conversion cores --- scripts/retrieve_detector_data.sh | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/scripts/retrieve_detector_data.sh b/scripts/retrieve_detector_data.sh index 795daf8..894c627 100755 --- a/scripts/retrieve_detector_data.sh +++ b/scripts/retrieve_detector_data.sh @@ -170,30 +170,30 @@ then if [ ${DETECTOR} == "JF07T32V01" ] then - time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/bernina/config/jungfrau/pixel_mask/JF07T32V01/pixel_mask_13_full.h5 + time taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory 
${dir_name} --verbosity DEBUG --add_pixel_mask /sf/bernina/config/jungfrau/pixel_mask/JF07T32V01/pixel_mask_13_full.h5 elif [ ${DETECTOR} == "JF03T01V02" ] then - time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/bernina/config/jungfrau/pixel_mask/JF03T01V02/pixel_mask_half_chip.h5 + time taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/bernina/config/jungfrau/pixel_mask/JF03T01V02/pixel_mask_half_chip.h5 elif [ ${DETECTOR} == "JF02T09V02" ] then - time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=1 + time taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=1 elif [ ${DETECTOR} == "JF06T08V02" ] then - time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/alvra/config/jungfrau/pixel_mask/JF06T08V01/mask_2lines_module3.h5 + time taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/alvra/config/jungfrau/pixel_mask/JF06T08V01/mask_2lines_module3.h5 # elif [ ${DETECTOR} == "JF06T32V02" ] # then -# time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/alvra/config/jungfrau/pixel_mask/JF06T32V02/mask_noise_in_28.h5 +# time taskset -c ${coreAssociatedConversion} python 
/home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/alvra/config/jungfrau/pixel_mask/JF06T32V02/mask_noise_in_28.h5 elif [ ${DETECTOR} == "JF13T01V01" ] then - time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/bernina/config/jungfrau/pixel_mask/JF13T01V01/pixel_mask_bad_rb_22.09.2020.h5 + time taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/bernina/config/jungfrau/pixel_mask/JF13T01V01/pixel_mask_bad_rb_22.09.2020.h5 elif [ ${DETECTOR} == "JF11T04V01" ] then - time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=2 + time taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=2 elif [ ${DETECTOR} == "JF10T01V01" ] then - time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=1 + time taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=1 else - time python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG + time taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG fi fi @@ -225,7 +225,7 @@ 
else taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py ${OUTFILE_RAW} ${OUTFILE} ${RUN_FILE} ${DET_CONFIG_FILE} if [ ${DETECTOR} == "JF06T32V02" ] || [ ${DETECTOR} == "JF06T08V02" ] then - python /home/dbe/git/sf_daq_buffer/scripts/make_crystfel_list.py ${OUTFILE} ${RUN_FILE} ${DETECTOR} + taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/make_crystfel_list.py ${OUTFILE} ${RUN_FILE} ${DETECTOR} fi date5=$(date +%s) echo "Finished : "`date` From e68a77ce8b01337717e7100096d11ecc7c45df59 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Fri, 4 Dec 2020 18:40:42 +0100 Subject: [PATCH 29/61] arrangement of alra detectors on daq8 server --- scripts/JF02-buffer-worker.sh | 2 +- scripts/JF02-stream.sh | 2 +- scripts/JF02-vis.sh | 2 +- scripts/JF04-buffer-worker.sh | 2 +- scripts/JF04-stream.sh | 2 +- scripts/JF04-vis.sh | 4 +++- scripts/JF06-buffer-worker.sh | 6 +----- scripts/JF06-stream.sh | 3 +-- scripts/JF06-vis.sh | 4 +++- scripts/JF06_4M-buffer-worker.sh | 5 +---- scripts/JF06_4M-stream.sh | 2 +- scripts/JF09-buffer-worker.sh | 2 +- scripts/JF09-stream.sh | 2 +- scripts/JF09-vis.sh | 4 +++- scripts/JF10-buffer-worker.sh | 2 +- scripts/JF10-stream.sh | 2 +- scripts/JF10-vis.sh | 4 +++- 17 files changed, 25 insertions(+), 25 deletions(-) diff --git a/scripts/JF02-buffer-worker.sh b/scripts/JF02-buffer-worker.sh index 8ab83c8..a358eec 100644 --- a/scripts/JF02-buffer-worker.sh +++ b/scripts/JF02-buffer-worker.sh @@ -15,7 +15,7 @@ case ${H} in coreAssociatedBuffer=(11 12 13 14 15 16 17 18 19) ;; 'sf-daq-8') - coreAssociatedBuffer=(11 11 12 12 13 13 1 1 0) + coreAssociatedBuffer=(1 1 1 2 2 2 3 3 3) ;; *) CORES=(25 25 26 26 27 27 28 28 29) diff --git a/scripts/JF02-stream.sh b/scripts/JF02-stream.sh index 0df4e74..2ee361f 100644 --- a/scripts/JF02-stream.sh +++ b/scripts/JF02-stream.sh @@ -7,7 +7,7 @@ case ${H} in config=/gpfs/photonics/swissfel/buffer/config/stream-JF02.json ;; 'sf-daq-8') - 
coreAssociated="14,15,16" + coreAssociated="20,21" config=/gpfs/photonics/swissfel/buffer/config/stream-JF02.daq8.json ;; *) diff --git a/scripts/JF02-vis.sh b/scripts/JF02-vis.sh index 2aa7816..f13f7a8 100644 --- a/scripts/JF02-vis.sh +++ b/scripts/JF02-vis.sh @@ -17,7 +17,7 @@ case ${H} in CORES='36,37' ;; 'sf-daq-8') - CORES='17,18' + CORES='33,34' ;; *) CORES='2' diff --git a/scripts/JF04-buffer-worker.sh b/scripts/JF04-buffer-worker.sh index c479a3d..e0be303 100644 --- a/scripts/JF04-buffer-worker.sh +++ b/scripts/JF04-buffer-worker.sh @@ -8,7 +8,7 @@ fi M=$1 -coreAssociatedBuffer=(33) +coreAssociatedBuffer=(12) initialUDPport=50040 port=$((${initialUDPport}+10#${M})) diff --git a/scripts/JF04-stream.sh b/scripts/JF04-stream.sh index d38ce38..307df8f 100644 --- a/scripts/JF04-stream.sh +++ b/scripts/JF04-stream.sh @@ -1,5 +1,5 @@ #!/bin/bash -coreAssociated="36" +coreAssociated="27" taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF04.json diff --git a/scripts/JF04-vis.sh b/scripts/JF04-vis.sh index 4fd87a0..72478a8 100644 --- a/scripts/JF04-vis.sh +++ b/scripts/JF04-vis.sh @@ -11,7 +11,9 @@ PORT_BACKEND=9004 H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` BACKEND=${H} -taskset -c 16 \ +CORES=39 + +taskset -c ${CORES} \ streamvis bernina --allow-websocket-origin=${H}:${PORT} \ --allow-websocket-origin=sf-daq-bernina:${PORT} --port=${PORT} \ --address tcp://${BACKEND}:${PORT_BACKEND} \ diff --git a/scripts/JF06-buffer-worker.sh b/scripts/JF06-buffer-worker.sh index bf9691b..1c94d44 100644 --- a/scripts/JF06-buffer-worker.sh +++ b/scripts/JF06-buffer-worker.sh @@ -8,11 +8,7 @@ fi M=$1 -# Add ourselves to the user cpuset. 
-# echo $$ > /sys/fs/cgroup/cpuset/user/tasks - -#coreAssociatedBuffer=(22 22 23 23 24 24 25 25 26 26 27 27 28 28 29 29 30 30 31 31 32 32 33 33 34 34 35 35 36 36 37 37) -coreAssociatedBuffer=(6 6 7 7 8 8 9 9 10 10 22 22 23 23 24 24 25 25 26 26 27 27 28 28 29 29 30 30 31 31 32 32) +coreAssociatedBuffer=(4 4 4 4 5 5 5 5 6 6 6 6 7 7 7 7 8 8 8 8 9 9 9 9 10 10 10 10 11 11 11 11) initialUDPport=50060 port=$((${initialUDPport}+10#${M})) diff --git a/scripts/JF06-stream.sh b/scripts/JF06-stream.sh index 9a90023..28a48db 100644 --- a/scripts/JF06-stream.sh +++ b/scripts/JF06-stream.sh @@ -1,6 +1,5 @@ #!/bin/bash -coreAssociated="2,3,4,5" -#echo $$ > /sys/fs/cgroup/cpuset/user/tasks +coreAssociated="22,23,24" taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF06.daq8.json diff --git a/scripts/JF06-vis.sh b/scripts/JF06-vis.sh index 829531b..a3050f9 100644 --- a/scripts/JF06-vis.sh +++ b/scripts/JF06-vis.sh @@ -11,7 +11,9 @@ PORT_BACKEND=9006 H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` BACKEND=${H} -taskset -c 19,20 \ +CORES="35,36" + +taskset -c ${CORES} \ streamvis default16m --allow-websocket-origin=${H}:${PORT} --allow-websocket-origin=sf-daq-alvra:${PORT} \ --port=${PORT} --address tcp://${BACKEND}:${PORT_BACKEND} \ --page-title 16M_Jungfrau_Alvra diff --git a/scripts/JF06_4M-buffer-worker.sh b/scripts/JF06_4M-buffer-worker.sh index 6d0c062..7bc9afd 100644 --- a/scripts/JF06_4M-buffer-worker.sh +++ b/scripts/JF06_4M-buffer-worker.sh @@ -8,10 +8,7 @@ fi M=$1 -# Add ourselves to the user cpuset. 
-# echo $$ > /sys/fs/cgroup/cpuset/user/tasks - -coreAssociatedBuffer=(6 7 8 9 10 22 23 24) +coreAssociatedBuffer=(4 5 6 7 8 9 10 11) initialUDPport=50060 port=$((${initialUDPport}+10#${M})) diff --git a/scripts/JF06_4M-stream.sh b/scripts/JF06_4M-stream.sh index 21966d4..44c5f49 100644 --- a/scripts/JF06_4M-stream.sh +++ b/scripts/JF06_4M-stream.sh @@ -1,5 +1,5 @@ #!/bin/bash -coreAssociated="2,3,4,5" +coreAssociated="20,21" taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF06_4M.daq8.json diff --git a/scripts/JF09-buffer-worker.sh b/scripts/JF09-buffer-worker.sh index 6056cc6..49f6013 100644 --- a/scripts/JF09-buffer-worker.sh +++ b/scripts/JF09-buffer-worker.sh @@ -8,7 +8,7 @@ fi M=$1 -coreAssociatedBuffer=(34) +coreAssociatedBuffer=(12) initialUDPport=50150 port=$((${initialUDPport}+10#${M})) diff --git a/scripts/JF09-stream.sh b/scripts/JF09-stream.sh index f0b5a33..e1c239b 100644 --- a/scripts/JF09-stream.sh +++ b/scripts/JF09-stream.sh @@ -1,5 +1,5 @@ #!/bin/bash -coreAssociated="37" +coreAssociated="25" taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF09.json diff --git a/scripts/JF09-vis.sh b/scripts/JF09-vis.sh index b8e2b24..3b7eeb7 100644 --- a/scripts/JF09-vis.sh +++ b/scripts/JF09-vis.sh @@ -11,7 +11,9 @@ PORT_BACKEND=9009 H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` BACKEND=${H} -taskset -c 17 \ +CORES=37 + +taskset -c ${CORES} \ streamvis alvra --allow-websocket-origin=${H}:${PORT} \ --allow-websocket-origin=sf-daq-alvra:${PORT} --port=${PORT} \ --address tcp://${BACKEND}:${PORT_BACKEND} \ diff --git a/scripts/JF10-buffer-worker.sh b/scripts/JF10-buffer-worker.sh index 7f57f56..b17dac8 100644 --- a/scripts/JF10-buffer-worker.sh +++ b/scripts/JF10-buffer-worker.sh @@ -8,7 +8,7 @@ fi M=$1 -coreAssociatedBuffer=(35) +coreAssociatedBuffer=(12) initialUDPport=50160 port=$((${initialUDPport}+10#${M})) diff --git a/scripts/JF10-stream.sh 
b/scripts/JF10-stream.sh index 507e28b..d204c45 100644 --- a/scripts/JF10-stream.sh +++ b/scripts/JF10-stream.sh @@ -1,5 +1,5 @@ #!/bin/bash -coreAssociated="38" +coreAssociated="26" taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF10.json diff --git a/scripts/JF10-vis.sh b/scripts/JF10-vis.sh index f7f6a9a..fe77449 100644 --- a/scripts/JF10-vis.sh +++ b/scripts/JF10-vis.sh @@ -11,7 +11,9 @@ PORT_BACKEND=9010 H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` BACKEND=${H} -taskset -c 18 \ +CORES=38 + +taskset -c ${CORES} \ streamvis alvra --allow-websocket-origin=${H}:${PORT} \ --allow-websocket-origin=sf-daq-alvra:${PORT} --port=${PORT} \ --address tcp://${BACKEND}:${PORT_BACKEND} \ From 13218459bc2d9765187334ba37bb6edd4242c195 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Sun, 6 Dec 2020 10:05:43 +0100 Subject: [PATCH 30/61] restart stream service when config file changed --- scripts/JF01-stream.sh | 6 +++++- scripts/JF02-stream.sh | 11 ++++++++--- scripts/JF04-stream.sh | 6 +++++- scripts/JF06-stream.sh | 6 +++++- scripts/JF06_4M-stream.sh | 7 ++++++- scripts/JF07-stream.sh | 6 +++++- scripts/JF09-stream.sh | 6 +++++- scripts/JF10-stream.sh | 6 +++++- scripts/JF11-stream.sh | 6 +++++- scripts/JF13-stream.sh | 6 +++++- scripts/check_config_changed.sh | 21 +++++++++++++++++++++ 11 files changed, 75 insertions(+), 12 deletions(-) create mode 100755 scripts/check_config_changed.sh diff --git a/scripts/JF01-stream.sh b/scripts/JF01-stream.sh index 85ed846..6341508 100644 --- a/scripts/JF01-stream.sh +++ b/scripts/JF01-stream.sh @@ -1,5 +1,9 @@ #!/bin/bash coreAssociated="24" +CONFIG=/gpfs/photonics/swissfel/buffer/config/stream-JF01.json +SERVICE=JF01-stream -taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF01.json +/home/dbe/git/sf_daq_buffer/scripts/check_config_changed.sh ${CONFIG} ${SERVICE} & + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream ${CONFIG} diff 
--git a/scripts/JF02-stream.sh b/scripts/JF02-stream.sh index 2ee361f..7912e94 100644 --- a/scripts/JF02-stream.sh +++ b/scripts/JF02-stream.sh @@ -4,14 +4,19 @@ H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` case ${H} in 'sf-daq-4') coreAssociated="33,34,35" - config=/gpfs/photonics/swissfel/buffer/config/stream-JF02.json + CONFIG=/gpfs/photonics/swissfel/buffer/config/stream-JF02.daq4.json ;; 'sf-daq-8') coreAssociated="20,21" - config=/gpfs/photonics/swissfel/buffer/config/stream-JF02.daq8.json + CONFIG=/gpfs/photonics/swissfel/buffer/config/stream-JF02.json ;; *) coreAssociated="12" esac -taskset -c ${coreAssociated} /usr/local/bin/sf_stream ${config} +SERVICE=JF02-stream + +/home/dbe/git/sf_daq_buffer/scripts/check_config_changed.sh ${CONFIG} ${SERVICE} & + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream ${CONFIG} + diff --git a/scripts/JF04-stream.sh b/scripts/JF04-stream.sh index 307df8f..e8690d2 100644 --- a/scripts/JF04-stream.sh +++ b/scripts/JF04-stream.sh @@ -1,5 +1,9 @@ #!/bin/bash coreAssociated="27" +CONFIG=/gpfs/photonics/swissfel/buffer/config/stream-JF04.json +SERVICE=JF04-stream -taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF04.json +/home/dbe/git/sf_daq_buffer/scripts/check_config_changed.sh ${CONFIG} ${SERVICE} & + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream ${CONFIG} diff --git a/scripts/JF06-stream.sh b/scripts/JF06-stream.sh index 28a48db..03d6adf 100644 --- a/scripts/JF06-stream.sh +++ b/scripts/JF06-stream.sh @@ -1,5 +1,9 @@ #!/bin/bash coreAssociated="22,23,24" +CONFIG=/gpfs/photonics/swissfel/buffer/config/stream-JF06.json +SERVICE=JF06-stream -taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF06.daq8.json +/home/dbe/git/sf_daq_buffer/scripts/check_config_changed.sh ${CONFIG} ${SERVICE} & + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream ${CONFIG} diff --git a/scripts/JF06_4M-stream.sh b/scripts/JF06_4M-stream.sh 
index 44c5f49..7896ef8 100644 --- a/scripts/JF06_4M-stream.sh +++ b/scripts/JF06_4M-stream.sh @@ -1,5 +1,10 @@ #!/bin/bash coreAssociated="20,21" +CONFIG=/gpfs/photonics/swissfel/buffer/config/stream-JF06_4M.daq8.json +SERVICE=JF06_4M-stream + +/home/dbe/git/sf_daq_buffer/scripts/check_config_changed.sh ${CONFIG} ${SERVICE} & + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream ${CONFIG} -taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF06_4M.daq8.json diff --git a/scripts/JF07-stream.sh b/scripts/JF07-stream.sh index bcaf1d4..48a0e6b 100644 --- a/scripts/JF07-stream.sh +++ b/scripts/JF07-stream.sh @@ -1,5 +1,9 @@ #!/bin/bash coreAssociated="20,21,22,23" +CONFIG=/gpfs/photonics/swissfel/buffer/config/stream-JF07.json +SERVICE=JF07-stream -taskset -c ${coreAssociated} /usr/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF07.json +/home/dbe/git/sf_daq_buffer/scripts/check_config_changed.sh ${CONFIG} ${SERVICE} & + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream ${CONFIG} diff --git a/scripts/JF09-stream.sh b/scripts/JF09-stream.sh index e1c239b..d05d2be 100644 --- a/scripts/JF09-stream.sh +++ b/scripts/JF09-stream.sh @@ -1,5 +1,9 @@ #!/bin/bash coreAssociated="25" +CONFIG=/gpfs/photonics/swissfel/buffer/config/stream-JF09.json +SERVICE=JF09-stream -taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF09.json +/home/dbe/git/sf_daq_buffer/scripts/check_config_changed.sh ${CONFIG} ${SERVICE} & + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream ${CONFIG} diff --git a/scripts/JF10-stream.sh b/scripts/JF10-stream.sh index d204c45..b73b2df 100644 --- a/scripts/JF10-stream.sh +++ b/scripts/JF10-stream.sh @@ -1,5 +1,9 @@ #!/bin/bash coreAssociated="26" +CONFIG=/gpfs/photonics/swissfel/buffer/config/stream-JF10.json +SERVICE=JF10-stream -taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF10.json 
+/home/dbe/git/sf_daq_buffer/scripts/check_config_changed.sh ${CONFIG} ${SERVICE} & + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream ${CONFIG} diff --git a/scripts/JF11-stream.sh b/scripts/JF11-stream.sh index e5fb95f..93db1d6 100644 --- a/scripts/JF11-stream.sh +++ b/scripts/JF11-stream.sh @@ -1,5 +1,9 @@ #!/bin/bash coreAssociated="14,15,16" +CONFIG=/gpfs/photonics/swissfel/buffer/config/stream-JF11.json +SERVICE=JF11-stream -taskset -c ${coreAssociated} /usr/local/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF11.json +/home/dbe/git/sf_daq_buffer/scripts/check_config_changed.sh ${CONFIG} ${SERVICE} & + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream ${CONFIG} diff --git a/scripts/JF13-stream.sh b/scripts/JF13-stream.sh index 3b8e226..4489505 100644 --- a/scripts/JF13-stream.sh +++ b/scripts/JF13-stream.sh @@ -1,5 +1,9 @@ #!/bin/bash coreAssociated="25" +CONFIG=/gpfs/photonics/swissfel/buffer/config/stream-JF13.json +SERVICE=JF13-stream -taskset -c ${coreAssociated} /usr/bin/sf_stream /gpfs/photonics/swissfel/buffer/config/stream-JF13.json +/home/dbe/git/sf_daq_buffer/scripts/check_config_changed.sh ${CONFIG} ${SERVICE} & + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream ${CONFIG} diff --git a/scripts/check_config_changed.sh b/scripts/check_config_changed.sh new file mode 100755 index 0000000..bf6b0fe --- /dev/null +++ b/scripts/check_config_changed.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +F=$1 +S=$2 + +t=`stat -c %y $F` + +while true +do + sleep 5 + t1=`stat -c %y $F` + + if [ "${t1}" != "${t}" ] + then + echo $F changed + t=${t1} + systemctl restart ${S} + fi + +done + From 4f69fec8385f6015a05be9a6db2de40b7c716a19 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Wed, 9 Dec 2020 12:51:49 +0100 Subject: [PATCH 31/61] add JF03, copy pedestal files to common place --- scripts/copy_pedestal_file.py | 44 +++++++++++++++++++++++++++++++ scripts/retrieve_detector_data.sh | 10 ++++++- 2 files changed, 53 insertions(+), 1 deletion(-) 
create mode 100644 scripts/copy_pedestal_file.py diff --git a/scripts/copy_pedestal_file.py b/scripts/copy_pedestal_file.py new file mode 100644 index 0000000..4235d63 --- /dev/null +++ b/scripts/copy_pedestal_file.py @@ -0,0 +1,44 @@ +import argparse +import json +import os +import datetime +from shutil import copyfile + +PEDESTAL_DIRECTORY="/sf/jungfrau/data/pedestal" + +parser = argparse.ArgumentParser() + +parser.add_argument("file_pedestal", type=str) +parser.add_argument("json_run", type=str) +parser.add_argument("detector", type=str) +parser.add_argument("json_stream", type=str) + +args = parser.parse_args() + +with open(args.json_run, "r") as run_file: + data = json.load(run_file) + + request_time=datetime.datetime.strptime(data["request_time"], '%Y-%m-%d %H:%M:%S.%f') + + if not os.path.isdir(f'{PEDESTAL_DIRECTORY}/{args.detector}'): + os.mkdir(f'{PEDESTAL_DIRECTORY}/{args.detector}') + + out_name = f'{PEDESTAL_DIRECTORY}/{args.detector}/{request_time.strftime("%Y%m%d_%H%M%S")}.h5' + copyfile(args.file_pedestal, out_name) + + print(f'Copied resulting pedestal file {args.file_pedestal} to {out_name}') + + if not os.path.exists(args.json_stream): + print(f'stream file {args.json_stream} does not exists, exiting') + exit() + + with open(args.json_stream, "r") as stream_file: + det = json.load(stream_file) + + print(f'Changing in stream file {args.json_stream} pedestal from {det["pedestal_file"]} to {out_name}') + + det["pedestal_file"] = out_name + + with open(args.json_stream, "w") as write_file: + json.dump(det, write_file, indent=4) + diff --git a/scripts/retrieve_detector_data.sh b/scripts/retrieve_detector_data.sh index 894c627..3c87e49 100755 --- a/scripts/retrieve_detector_data.sh +++ b/scripts/retrieve_detector_data.sh @@ -59,6 +59,10 @@ case ${DETECTOR} in NM=1 DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF04.json ;; +'JF03T01V01') + NM=1 + DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF03.json + ;; 'JF06T32V02') 
NM=32 DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF06.json @@ -196,6 +200,10 @@ then time taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG fi + PEDESTAL_FILE=`echo ${OUTFILE_RAW} | sed 's/.h5/.res.h5/'` + + taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/copy_pedestal_file.py ${PEDESTAL_FILE} ${RUN_FILE} ${DETECTOR} ${DET_CONFIG_FILE} + fi else @@ -222,7 +230,7 @@ else conda deactivate conda activate sf-daq - taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py ${OUTFILE_RAW} ${OUTFILE} ${RUN_FILE} ${DET_CONFIG_FILE} + time taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py ${OUTFILE_RAW} ${OUTFILE} ${RUN_FILE} ${DET_CONFIG_FILE} if [ ${DETECTOR} == "JF06T32V02" ] || [ ${DETECTOR} == "JF06T08V02" ] then taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/make_crystfel_list.py ${OUTFILE} ${RUN_FILE} ${DETECTOR} From 91f4348bf2fc8b024e2a0272429329c62192ef8b Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Wed, 9 Dec 2020 12:54:56 +0100 Subject: [PATCH 32/61] added JF03 (Bernina I0 detector) --- scripts/JF03-buffer-worker.sh | 17 +++++++++++++++++ scripts/JF03-buffer-worker@.service | 16 ++++++++++++++++ scripts/JF03-buffer.service | 10 ++++++++++ scripts/JF03-stream.service | 15 +++++++++++++++ scripts/JF03-stream.sh | 9 +++++++++ scripts/JF03-vis.service | 13 +++++++++++++ scripts/JF03-vis.sh | 21 +++++++++++++++++++++ 7 files changed, 101 insertions(+) create mode 100644 scripts/JF03-buffer-worker.sh create mode 100644 scripts/JF03-buffer-worker@.service create mode 100644 scripts/JF03-buffer.service create mode 100644 scripts/JF03-stream.service create mode 100644 scripts/JF03-stream.sh create mode 100644 scripts/JF03-vis.service create mode 100644 
scripts/JF03-vis.sh diff --git a/scripts/JF03-buffer-worker.sh b/scripts/JF03-buffer-worker.sh new file mode 100644 index 0000000..00ecbc4 --- /dev/null +++ b/scripts/JF03-buffer-worker.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +if [ $# != 1 ] +then + systemctl start JF03-buffer-worker@00 + exit +fi + +M=$1 + +coreAssociatedBuffer=(12) + +initialUDPport=50030 +port=$((${initialUDPport}+10#${M})) +DETECTOR=JF03T01V01 + +taskset -c ${coreAssociatedBuffer[10#${M}]} /usr/local/bin/sf_buffer ${DETECTOR} M${M} ${port} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${M} diff --git a/scripts/JF03-buffer-worker@.service b/scripts/JF03-buffer-worker@.service new file mode 100644 index 0000000..758aed9 --- /dev/null +++ b/scripts/JF03-buffer-worker@.service @@ -0,0 +1,16 @@ +[Unit] +Description=JF03 UDP2buffer worker instance as a service, instance %i +Requires=JF03-buffer.service +Before=JF03-buffer.service +BindsTo=JF03-buffer.service + +[Service] +PermissionsStartOnly=true +Type=idle +User=root +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF03-buffer-worker.sh %i +TimeoutStartSec=10 +RestartSec=1 + +[Install] +WantedBy=JF03-buffer.service diff --git a/scripts/JF03-buffer.service b/scripts/JF03-buffer.service new file mode 100644 index 0000000..7e87546 --- /dev/null +++ b/scripts/JF03-buffer.service @@ -0,0 +1,10 @@ +[Unit] +Description=All UDP-buffer instances of JF03 + +[Service] +Type=oneshot +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF03-buffer-worker.sh +RemainAfterExit=yes + +[Install] +WantedBy=multi-user.target diff --git a/scripts/JF03-stream.service b/scripts/JF03-stream.service new file mode 100644 index 0000000..0cce437 --- /dev/null +++ b/scripts/JF03-stream.service @@ -0,0 +1,15 @@ +[Unit] +Description=stream service (to streamvis and live analysis) of JF03 + +[Service] +PermissionsStartOnly=true +Type=idle +User=root +ExecStart=/usr/bin/sh /home/dbe/service_scripts/JF03-stream.sh +TimeoutStartSec=10 +Restart=on-failure +RestartSec=1 + +[Install] 
+WantedBy=multi-user.target + diff --git a/scripts/JF03-stream.sh b/scripts/JF03-stream.sh new file mode 100644 index 0000000..62ef433 --- /dev/null +++ b/scripts/JF03-stream.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +coreAssociated="27" +CONFIG=/gpfs/photonics/swissfel/buffer/config/stream-JF03.json +SERVICE=JF03-stream + +/home/dbe/git/sf_daq_buffer/scripts/check_config_changed.sh ${CONFIG} ${SERVICE} & + +taskset -c ${coreAssociated} /usr/local/bin/sf_stream ${CONFIG} diff --git a/scripts/JF03-vis.service b/scripts/JF03-vis.service new file mode 100644 index 0000000..636e27c --- /dev/null +++ b/scripts/JF03-vis.service @@ -0,0 +1,13 @@ +[Unit] +Description=streamvis: JF03 + +[Service] +User=root +TimeoutStartSec=2 +ExecStart=/bin/bash ./home/dbe/service_scripts/JF03-vis.sh +Restart=on-failure +RestartSec=4 + +[Install] +WantedBy=multi-user.target + diff --git a/scripts/JF03-vis.sh b/scripts/JF03-vis.sh new file mode 100644 index 0000000..9fa8dd4 --- /dev/null +++ b/scripts/JF03-vis.sh @@ -0,0 +1,21 @@ +export PATH=/home/dbe/miniconda3/bin:$PATH + +source /home/dbe/miniconda3/etc/profile.d/conda.sh + +conda deactivate +conda activate vis + +PORT=5003 +PORT_BACKEND=9003 + +H=`echo ${HOSTNAME} | sed 's/.psi.ch//'` +BACKEND=${H} + +CORES=39 + +taskset -c ${CORES} \ +streamvis bernina --allow-websocket-origin=${H}:${PORT} \ +--allow-websocket-origin=sf-daq-bernina:${PORT} --port=${PORT} \ +--address tcp://${BACKEND}:${PORT_BACKEND} \ +--page-title I0 + From b39fd93431ea518f690b4ab8a7107c97b9c4f2a6 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Wed, 20 Jan 2021 11:10:39 +0100 Subject: [PATCH 33/61] Add method for creating image buffer files --- core-buffer/include/BufferUtils.hpp | 4 ++++ core-buffer/src/BufferUtils.cpp | 18 ++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/core-buffer/include/BufferUtils.hpp b/core-buffer/include/BufferUtils.hpp index 18a3fe5..1403bd0 100644 --- a/core-buffer/include/BufferUtils.hpp +++ 
b/core-buffer/include/BufferUtils.hpp @@ -27,6 +27,10 @@ namespace BufferUtils const std::string& module_name, const uint64_t pulse_id); + std::string get_image_filename( + const std::string& detector_folder, + const uint64_t pulse_id); + std::size_t get_file_frame_index(const uint64_t pulse_id); void update_latest_file( diff --git a/core-buffer/src/BufferUtils.cpp b/core-buffer/src/BufferUtils.cpp index 147d4e8..c763966 100644 --- a/core-buffer/src/BufferUtils.cpp +++ b/core-buffer/src/BufferUtils.cpp @@ -11,6 +11,24 @@ using namespace std; using namespace buffer_config; +string BufferUtils::get_image_filename( + const std::string& detector_folder, + const uint64_t pulse_id) +{ + uint64_t data_folder = pulse_id / buffer_config::FOLDER_MOD; + data_folder *= buffer_config::FOLDER_MOD; + + uint64_t data_file = pulse_id / buffer_config::FILE_MOD; + data_file *= buffer_config::FILE_MOD; + + stringstream folder; + folder << detector_folder << "/"; + folder << data_folder << "/"; + folder << data_file << buffer_config::FILE_EXTENSION; + + return folder.str(); +} + string BufferUtils::get_filename( const std::string& detector_folder, const std::string& module_name, From 407b00890d9e28a3cda6eee60cbf11adba4447bb Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Thu, 21 Jan 2021 10:16:38 +0100 Subject: [PATCH 34/61] Finish BinaryWriter for Images in buffer --- jf-live-writer/include/ImageBinaryWriter.hpp | 21 ++--- jf-live-writer/src/ImageBinaryWriter.cpp | 80 +++++++------------- 2 files changed, 41 insertions(+), 60 deletions(-) diff --git a/jf-live-writer/include/ImageBinaryWriter.hpp b/jf-live-writer/include/ImageBinaryWriter.hpp index 8e6ebfb..ce80655 100644 --- a/jf-live-writer/include/ImageBinaryWriter.hpp +++ b/jf-live-writer/include/ImageBinaryWriter.hpp @@ -1,15 +1,16 @@ -#ifndef BINARYWRITER_HPP -#define BINARYWRITER_HPP +#ifndef IMAGEBINARYWRITER_HPP +#define IMAGEBINARYWRITER_HPP #include #include "formats.hpp" + + class ImageBinaryWriter { - - const size_t 
MAX_FILE_BYTES = - buffer_config::FILE_MOD * sizeof(BufferBinaryFormat); - + const size_t IMAGE_BYTES; + const size_t IMAGE_SLOT_BYTES; + const size_t MAX_FILE_BYTES; const std::string detector_folder_; std::string latest_filename_; @@ -21,13 +22,15 @@ class ImageBinaryWriter { public: - ImageBinaryWriter(const std::string& detector_folder); + ImageBinaryWriter( + const std::string& detector_folder, + const uint64_t image_n_bytes); virtual ~ImageBinaryWriter(); - void write(const uint64_t pulse_id, const BufferBinaryFormat* buffer); + void write(const ImageMetadata meta, const char* data); }; -#endif //BINARYWRITER_HPP +#endif //IMAGEBINARYWRITER_HPP diff --git a/jf-live-writer/src/ImageBinaryWriter.cpp b/jf-live-writer/src/ImageBinaryWriter.cpp index c3f70f5..d5f9900 100644 --- a/jf-live-writer/src/ImageBinaryWriter.cpp +++ b/jf-live-writer/src/ImageBinaryWriter.cpp @@ -11,9 +11,14 @@ #include "BufferUtils.hpp" using namespace std; +using namespace buffer_config; ImageBinaryWriter::ImageBinaryWriter( - const string& detector_folder): + const string& detector_folder, + const size_t image_n_bytes): + IMAGE_BYTES(image_n_bytes), + IMAGE_SLOT_BYTES(IMAGE_BYTES + sizeof(ImageMetadata)), + MAX_FILE_BYTES(IMAGE_SLOT_BYTES * FILE_MOD), detector_folder_(detector_folder), latest_filename_(detector_folder + "/LATEST"), current_output_filename_(""), @@ -26,20 +31,17 @@ ImageBinaryWriter::~ImageBinaryWriter() close_current_file(); } -void ImageBinaryWriter::write( - const uint64_t pulse_id, - const BufferBinaryFormat* buffer) +void ImageBinaryWriter::write(const ImageMetadata meta, const char* data) { auto current_frame_file = - BufferUtils::get_filename(detector_folder_, module_name_, pulse_id); + BufferUtils::get_image_filename(detector_folder_, meta.pulse_id); if (current_frame_file != current_output_filename_) { open_file(current_frame_file); } size_t n_bytes_offset = - BufferUtils::get_file_frame_index(pulse_id) * - sizeof(BufferBinaryFormat); + 
BufferUtils::get_file_frame_index(meta.pulse_id) * IMAGE_SLOT_BYTES; auto lseek_result = lseek(output_file_fd_, n_bytes_offset, SEEK_SET); if (lseek_result < 0) { @@ -48,7 +50,7 @@ void ImageBinaryWriter::write( using namespace date; using namespace chrono; err_msg << "[" << system_clock::now() << "]"; - err_msg << "[BufferBinaryWriter::write]"; + err_msg << "[ImageBinaryWriter::write]"; err_msg << " Error while lseek on file "; err_msg << current_output_filename_; err_msg << " for n_bytes_offset "; @@ -58,8 +60,23 @@ void ImageBinaryWriter::write( throw runtime_error(err_msg.str()); } - auto n_bytes = ::write(output_file_fd_, buffer, sizeof(BufferBinaryFormat)); - if (n_bytes < sizeof(BufferBinaryFormat)) { + auto n_bytes_meta = ::write(output_file_fd_, &meta, sizeof(ImageMetadata)); + if (n_bytes_meta < sizeof(ImageMetadata)) { + stringstream err_msg; + + using namespace date; + using namespace chrono; + err_msg << "[" << system_clock::now() << "]"; + err_msg << "[BufferBinaryWriter::write]"; + err_msg << " Error while writing to file "; + err_msg << current_output_filename_ << ": "; + err_msg << strerror(errno) << endl; + + throw runtime_error(err_msg.str()); + } + + auto n_bytes_data = ::write(output_file_fd_, data, IMAGE_BYTES); + if (n_bytes_data < sizeof(IMAGE_BYTES)) { stringstream err_msg; using namespace date; @@ -88,7 +105,7 @@ void ImageBinaryWriter::open_file(const std::string& filename) using namespace date; using namespace chrono; err_msg << "[" << system_clock::now() << "]"; - err_msg << "[BinaryWriter::open_file]"; + err_msg << "[ImageBinaryWriter::open_file]"; err_msg << " Cannot create file "; err_msg << filename << ": "; err_msg << strerror(errno) << endl; @@ -96,45 +113,6 @@ void ImageBinaryWriter::open_file(const std::string& filename) throw runtime_error(err_msg.str()); } - // TODO: Remove context if test successful. - - /** Setting the buffer file size in advance to try to lower the number of - metadata updates on GPFS. 
*/ - { - // TODO: Try instead to use fallocate. - if (lseek(output_file_fd_, MAX_FILE_BYTES, SEEK_SET) < 0) { - stringstream err_msg; - - using namespace date; - using namespace chrono; - err_msg << "[" << system_clock::now() << "]"; - err_msg << "[BufferBinaryWriter::open_file]"; - err_msg << " Error while lseek on end of file "; - err_msg << current_output_filename_; - err_msg << " for MAX_FILE_BYTES "; - err_msg << MAX_FILE_BYTES << ": "; - err_msg << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } - - const uint8_t mark = 255; - if(::write(output_file_fd_, &mark, sizeof(mark)) != sizeof(mark)) { - stringstream err_msg; - - using namespace date; - using namespace chrono; - err_msg << "[" << system_clock::now() << "]"; - err_msg << "[BufferBinaryWriter::open_file]"; - err_msg << " Error while writing to file "; - err_msg << current_output_filename_ << ": "; - err_msg << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } - } - - current_output_filename_ = filename; } @@ -147,7 +125,7 @@ void ImageBinaryWriter::close_current_file() using namespace date; using namespace chrono; err_msg << "[" << system_clock::now() << "]"; - err_msg << "[BufferBinaryWriter::close_current_file]"; + err_msg << "[ImageBinaryWriter::close_current_file]"; err_msg << " Error while closing file "; err_msg << current_output_filename_ << ": "; err_msg << strerror(errno) << endl; From 2ed8b0fd1b3f3939060f5f1062b1f347501fddac Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Thu, 21 Jan 2021 10:29:19 +0100 Subject: [PATCH 35/61] Add avg throughput to writer stats --- jf-live-writer/include/WriterStats.hpp | 6 ++++-- jf-live-writer/src/WriterStats.cpp | 20 ++++++++++++++------ 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/jf-live-writer/include/WriterStats.hpp b/jf-live-writer/include/WriterStats.hpp index cb023a7..775a46f 100644 --- a/jf-live-writer/include/WriterStats.hpp +++ b/jf-live-writer/include/WriterStats.hpp @@ -8,7 +8,8 @@ class 
WriterStats { const std::string detector_name_; - size_t stats_modulo_; + const size_t stats_modulo_; + const size_t image_n_bytes_; int image_counter_; uint32_t total_buffer_write_us_; @@ -21,7 +22,8 @@ class WriterStats { public: WriterStats( const std::string &detector_name, - const size_t stats_modulo); + const size_t stats_modulo, + const size_t image_n_bytes); void start_image_write(); void end_image_write(); }; diff --git a/jf-live-writer/src/WriterStats.cpp b/jf-live-writer/src/WriterStats.cpp index 1d67947..51a0922 100644 --- a/jf-live-writer/src/WriterStats.cpp +++ b/jf-live-writer/src/WriterStats.cpp @@ -6,9 +6,11 @@ using namespace chrono; WriterStats::WriterStats( const string& detector_name, - const size_t stats_modulo) : + const size_t stats_modulo, + const size_t image_n_bytes) : detector_name_(detector_name), - stats_modulo_(stats_modulo) + stats_modulo_(stats_modulo), + image_n_bytes_(image_n_bytes) { reset_counters(); } @@ -43,19 +45,25 @@ void WriterStats::end_image_write() void WriterStats::print_stats() { - float avg_buffer_write_us = total_buffer_write_us_ / image_counter_; + const float avg_buffer_write_us = total_buffer_write_us_ / image_counter_; - uint64_t timestamp = time_point_cast( + const uint64_t timestamp = time_point_cast( system_clock::now()).time_since_epoch().count(); + const uint64_t avg_throughput = + // bytes -> megabytes + (image_n_bytes_ / 1024 / 1024) / + // micro seconds -> seconds + (avg_buffer_write_us * 1000 * 1000); + // Output in InfluxDB line protocol cout << "jf_buffer_writer"; cout << ",detector_name=" << detector_name_; cout << " "; cout << "n_written_images=" << image_counter_ << "i"; cout << " ,avg_buffer_write_us=" << avg_buffer_write_us; - cout << ",max_buffer_write_us=" << max_buffer_write_us_ << "i"; - cout << " "; + cout << " ,max_buffer_write_us=" << max_buffer_write_us_ << "i"; + cout << " ,avg_throughput=" << avg_throughput; cout << timestamp; cout << endl; } From 
6a1fb79f9f6ca5823dc01d37d9018ae390481133 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Thu, 21 Jan 2021 10:32:15 +0100 Subject: [PATCH 36/61] Fix live_writer tests --- jf-live-writer/test/CMakeLists.txt | 4 +--- jf-live-writer/test/main.cpp | 2 -- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/jf-live-writer/test/CMakeLists.txt b/jf-live-writer/test/CMakeLists.txt index 1079fc2..8f806b0 100644 --- a/jf-live-writer/test/CMakeLists.txt +++ b/jf-live-writer/test/CMakeLists.txt @@ -2,9 +2,7 @@ add_executable(jf-live-writer-tests main.cpp) target_link_libraries(jf-live-writer-tests jf-live-writer-lib - hdf5 - hdf5_hl - hdf5_cpp zmq + rt gtest ) diff --git a/jf-live-writer/test/main.cpp b/jf-live-writer/test/main.cpp index 69b7f53..e819294 100644 --- a/jf-live-writer/test/main.cpp +++ b/jf-live-writer/test/main.cpp @@ -1,7 +1,5 @@ #include "gtest/gtest.h" -#include "test_BinaryReader.cpp" - using namespace std; int main(int argc, char **argv) { From d0d5776675d037b9e708c1fad657fc2a1c3ccc24 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Thu, 21 Jan 2021 10:32:50 +0100 Subject: [PATCH 37/61] Adjust the main for the jf-live-writer to ImageWriter --- jf-live-writer/CMakeLists.txt | 7 ++----- jf-live-writer/src/main.cpp | 22 +++++++++++++++------- jf-live-writer/test/test_BinaryReader.cpp | 10 ---------- 3 files changed, 17 insertions(+), 22 deletions(-) delete mode 100644 jf-live-writer/test/test_BinaryReader.cpp diff --git a/jf-live-writer/CMakeLists.txt b/jf-live-writer/CMakeLists.txt index 6028bfe..f96af5a 100644 --- a/jf-live-writer/CMakeLists.txt +++ b/jf-live-writer/CMakeLists.txt @@ -11,11 +11,8 @@ add_executable(jf-live-writer src/main.cpp) set_target_properties(jf-live-writer PROPERTIES OUTPUT_NAME jf_live_writer) target_link_libraries(jf-live-writer jf-live-writer-lib - sf-writer-lib - hdf5 - hdf5_hl - hdf5_cpp - pthread + zmq + rt ) enable_testing() diff --git a/jf-live-writer/src/main.cpp b/jf-live-writer/src/main.cpp index 1b912f8..aa47b8c 
100644 --- a/jf-live-writer/src/main.cpp +++ b/jf-live-writer/src/main.cpp @@ -3,8 +3,9 @@ #include #include #include +#include #include "live_writer_config.hpp" -#include "../../jf-buffer-writer/include/BufferStats.hpp" +#include "WriterStats.hpp" using namespace std; @@ -16,15 +17,16 @@ int main (int argc, char *argv[]) if (argc != 3) { cout << endl; cout << "Usage: jf_live_writer [detector_json_filename]" - " [stream_name]" << endl; + " [writer_id]" << endl; cout << "\tdetector_json_filename: detector config file path." << endl; + cout << "\twriter_id: Index of this writer instance." << endl; cout << endl; exit(-1); } - const auto stream_name = string(argv[2]); auto config = BufferUtils::read_json_config(string(argv[1])); + const int writer_id = atoi(argv[2]); auto ctx = zmq_ctx_new(); zmq_ctx_set(ctx, ZMQ_IO_THREADS, LIVE_ZMQ_IO_THREADS); @@ -32,15 +34,21 @@ int main (int argc, char *argv[]) ctx, config.detector_name, "assembler"); RamBuffer ram_buffer(config.detector_name, config.n_modules); - BufferStats stats(config.detector_name, stream_name, STATS_MODULO); - ImageMetadata meta; + const uint64_t image_n_bytes = config.n_modules * MODULE_N_BYTES; + ImageBinaryWriter writer(config.detector_name, image_n_bytes); + + WriterStats stats(config.detector_name, STATS_MODULO, image_n_bytes); + + ImageMetadata meta = {}; while (true) { zmq_recv(receiver, &meta, sizeof(meta), 0); char* data = ram_buffer.read_image(meta.pulse_id); - sender.send(meta, data); + stats.start_image_write(); - stats.record_stats(meta); + writer.write(meta, data); + + stats.end_image_write(); } } diff --git a/jf-live-writer/test/test_BinaryReader.cpp b/jf-live-writer/test/test_BinaryReader.cpp deleted file mode 100644 index cc30157..0000000 --- a/jf-live-writer/test/test_BinaryReader.cpp +++ /dev/null @@ -1,10 +0,0 @@ -#include -#include "gtest/gtest.h" - -TEST(BinaryReader, basic_interaction) { - // TODO: Write some real tests. 
- auto detector_folder = "test_device"; - auto module_name = "M1"; - BinaryReader reader(detector_folder, module_name); -} - From de7353b3144fcf370d5ebd800889f965c7c58c25 Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Tue, 26 Jan 2021 18:02:35 +0100 Subject: [PATCH 38/61] mask half chip of I0 to look good (some noisy region) --- scripts/retrieve_detector_data.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/retrieve_detector_data.sh b/scripts/retrieve_detector_data.sh index 3c87e49..2fd4614 100755 --- a/scripts/retrieve_detector_data.sh +++ b/scripts/retrieve_detector_data.sh @@ -59,7 +59,7 @@ case ${DETECTOR} in NM=1 DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF04.json ;; -'JF03T01V01') +'JF03T01V02') NM=1 DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF03.json ;; @@ -178,6 +178,7 @@ then elif [ ${DETECTOR} == "JF03T01V02" ] then time taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --add_pixel_mask /sf/bernina/config/jungfrau/pixel_mask/JF03T01V02/pixel_mask_half_chip.h5 +# time taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG elif [ ${DETECTOR} == "JF02T09V02" ] then time taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/jungfrau_create_pedestals.py --filename ${OUTFILE_RAW} --directory ${dir_name} --verbosity DEBUG --number_bad_modules=1 From d0df2677defea40fdbd37cfbc5875db82e7852b3 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Wed, 17 Feb 2021 12:46:58 +0100 Subject: [PATCH 39/61] Remove old writer --- CMakeLists.txt | 1 - jf-live-writer/include/ImageBinaryWriter.hpp | 36 --- jf-live-writer/src/ImageBinaryWriter.cpp | 143 ----------- sf-writer/CMakeLists.txt | 22 -- sf-writer/README.md | 42 --- 
sf-writer/include/BufferBinaryReader.hpp | 28 -- sf-writer/include/ImageAssembler.hpp | 53 ---- sf-writer/include/writer_config.hpp | 9 - sf-writer/src/BufferBinaryReader.cpp | 107 -------- sf-writer/src/ImageAssembler.cpp | 186 -------------- sf-writer/src/main.cpp | 158 ------------ sf-writer/test/CMakeLists.txt | 10 - sf-writer/test/main.cpp | 10 - sf-writer/test/mock/data.hpp | 72 ------ sf-writer/test/perf/perf_WriterH5Writer.cpp | 90 ------- sf-writer/test/test_ImageAssembler.cpp | 90 ------- sf-writer/test/test_JFH5Writer.cpp | 254 ------------------- 17 files changed, 1311 deletions(-) delete mode 100644 jf-live-writer/include/ImageBinaryWriter.hpp delete mode 100644 jf-live-writer/src/ImageBinaryWriter.cpp delete mode 100644 sf-writer/CMakeLists.txt delete mode 100644 sf-writer/README.md delete mode 100644 sf-writer/include/BufferBinaryReader.hpp delete mode 100644 sf-writer/include/ImageAssembler.hpp delete mode 100644 sf-writer/include/writer_config.hpp delete mode 100644 sf-writer/src/BufferBinaryReader.cpp delete mode 100644 sf-writer/src/ImageAssembler.cpp delete mode 100644 sf-writer/src/main.cpp delete mode 100644 sf-writer/test/CMakeLists.txt delete mode 100644 sf-writer/test/main.cpp delete mode 100644 sf-writer/test/mock/data.hpp delete mode 100644 sf-writer/test/perf/perf_WriterH5Writer.cpp delete mode 100644 sf-writer/test/test_ImageAssembler.cpp delete mode 100644 sf-writer/test/test_JFH5Writer.cpp diff --git a/CMakeLists.txt b/CMakeLists.txt index 63c25dc..ff244cf 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -33,5 +33,4 @@ add_subdirectory("jf-udp-recv") add_subdirectory("jf-buffer-writer") add_subdirectory("jf-assembler") add_subdirectory("sf-stream") -add_subdirectory("sf-writer") add_subdirectory("jf-live-writer") diff --git a/jf-live-writer/include/ImageBinaryWriter.hpp b/jf-live-writer/include/ImageBinaryWriter.hpp deleted file mode 100644 index ce80655..0000000 --- a/jf-live-writer/include/ImageBinaryWriter.hpp +++ /dev/null @@ 
-1,36 +0,0 @@ -#ifndef IMAGEBINARYWRITER_HPP -#define IMAGEBINARYWRITER_HPP - -#include - -#include "formats.hpp" - - - -class ImageBinaryWriter { - const size_t IMAGE_BYTES; - const size_t IMAGE_SLOT_BYTES; - const size_t MAX_FILE_BYTES; - const std::string detector_folder_; - std::string latest_filename_; - - std::string current_output_filename_; - int output_file_fd_; - - void open_file(const std::string& filename); - void close_current_file(); - - -public: - ImageBinaryWriter( - const std::string& detector_folder, - const uint64_t image_n_bytes); - - virtual ~ImageBinaryWriter(); - - void write(const ImageMetadata meta, const char* data); - -}; - - -#endif //IMAGEBINARYWRITER_HPP diff --git a/jf-live-writer/src/ImageBinaryWriter.cpp b/jf-live-writer/src/ImageBinaryWriter.cpp deleted file mode 100644 index d5f9900..0000000 --- a/jf-live-writer/src/ImageBinaryWriter.cpp +++ /dev/null @@ -1,143 +0,0 @@ -#include "ImageBinaryWriter.hpp" - -#include -#include -#include "date.h" -#include -#include -#include -#include - -#include "BufferUtils.hpp" - -using namespace std; -using namespace buffer_config; - -ImageBinaryWriter::ImageBinaryWriter( - const string& detector_folder, - const size_t image_n_bytes): - IMAGE_BYTES(image_n_bytes), - IMAGE_SLOT_BYTES(IMAGE_BYTES + sizeof(ImageMetadata)), - MAX_FILE_BYTES(IMAGE_SLOT_BYTES * FILE_MOD), - detector_folder_(detector_folder), - latest_filename_(detector_folder + "/LATEST"), - current_output_filename_(""), - output_file_fd_(-1) -{ -} - -ImageBinaryWriter::~ImageBinaryWriter() -{ - close_current_file(); -} - -void ImageBinaryWriter::write(const ImageMetadata meta, const char* data) -{ - auto current_frame_file = - BufferUtils::get_image_filename(detector_folder_, meta.pulse_id); - - if (current_frame_file != current_output_filename_) { - open_file(current_frame_file); - } - - size_t n_bytes_offset = - BufferUtils::get_file_frame_index(meta.pulse_id) * IMAGE_SLOT_BYTES; - - auto lseek_result = lseek(output_file_fd_, 
n_bytes_offset, SEEK_SET); - if (lseek_result < 0) { - stringstream err_msg; - - using namespace date; - using namespace chrono; - err_msg << "[" << system_clock::now() << "]"; - err_msg << "[ImageBinaryWriter::write]"; - err_msg << " Error while lseek on file "; - err_msg << current_output_filename_; - err_msg << " for n_bytes_offset "; - err_msg << n_bytes_offset << ": "; - err_msg << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } - - auto n_bytes_meta = ::write(output_file_fd_, &meta, sizeof(ImageMetadata)); - if (n_bytes_meta < sizeof(ImageMetadata)) { - stringstream err_msg; - - using namespace date; - using namespace chrono; - err_msg << "[" << system_clock::now() << "]"; - err_msg << "[BufferBinaryWriter::write]"; - err_msg << " Error while writing to file "; - err_msg << current_output_filename_ << ": "; - err_msg << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } - - auto n_bytes_data = ::write(output_file_fd_, data, IMAGE_BYTES); - if (n_bytes_data < sizeof(IMAGE_BYTES)) { - stringstream err_msg; - - using namespace date; - using namespace chrono; - err_msg << "[" << system_clock::now() << "]"; - err_msg << "[BufferBinaryWriter::write]"; - err_msg << " Error while writing to file "; - err_msg << current_output_filename_ << ": "; - err_msg << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } -} - -void ImageBinaryWriter::open_file(const std::string& filename) -{ - close_current_file(); - - BufferUtils::create_destination_folder(filename); - - output_file_fd_ = ::open(filename.c_str(), O_WRONLY | O_CREAT, - S_IRWXU | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH); - if (output_file_fd_ < 0) { - stringstream err_msg; - - using namespace date; - using namespace chrono; - err_msg << "[" << system_clock::now() << "]"; - err_msg << "[ImageBinaryWriter::open_file]"; - err_msg << " Cannot create file "; - err_msg << filename << ": "; - err_msg << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); 
- } - - current_output_filename_ = filename; -} - -void ImageBinaryWriter::close_current_file() -{ - if (output_file_fd_ != -1) { - if (close(output_file_fd_) < 0) { - stringstream err_msg; - - using namespace date; - using namespace chrono; - err_msg << "[" << system_clock::now() << "]"; - err_msg << "[ImageBinaryWriter::close_current_file]"; - err_msg << " Error while closing file "; - err_msg << current_output_filename_ << ": "; - err_msg << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } - - output_file_fd_ = -1; - - BufferUtils::update_latest_file( - latest_filename_, current_output_filename_); - - current_output_filename_ = ""; - } -} \ No newline at end of file diff --git a/sf-writer/CMakeLists.txt b/sf-writer/CMakeLists.txt deleted file mode 100644 index 5b520f5..0000000 --- a/sf-writer/CMakeLists.txt +++ /dev/null @@ -1,22 +0,0 @@ -file(GLOB SOURCES - src/*.cpp) - -add_library(sf-writer-lib STATIC ${SOURCES}) -target_include_directories(sf-writer-lib PUBLIC include/) -target_link_libraries(sf-writer-lib - external - core-buffer-lib) - -add_executable(sf-writer src/main.cpp) -set_target_properties(sf-writer PROPERTIES OUTPUT_NAME sf_writer) -target_link_libraries(sf-writer - sf-writer-lib - zmq - hdf5 - hdf5_hl - hdf5_cpp - pthread - ) - -enable_testing() -add_subdirectory(test/) \ No newline at end of file diff --git a/sf-writer/README.md b/sf-writer/README.md deleted file mode 100644 index 606129e..0000000 --- a/sf-writer/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# sf-writer - -sf-writer reads the binary buffer from disk, assembled the images and writes -them down in HDF5 format. - -## Data request ranges - -Data request ranges are composed of: - -- start_pulse_id (first pulse_id to be included in the file) -- stop_pulse_id (last pulse_id to be included in the file) -- pulse_id_step (how many pulses to skip between images.) 
- -pulse_id_step can be used to write data at different frequencies: - -- pulse_id_step == 1 (100Hz, write very pulse_id) -- pulse_id_step == 2 (50hz, write every second pulse) -- pulse_id_step == 10 (10Hz, write every 10th pulse) - -The next pulse_id to be written is calculated internally as: - -```c++ -auto next_pulse_id = currnet_pulse_id + pulse_id_step; -``` - -The loop criteria for writing is: - -```c++ -for ( - auto curr_pulse_id = start_pulse_id; - curr_pulse_id <= stop_pulse_id; - curr_pulse_id += pulse_id_step -) { - // Write curr_pulse_id to output file. -} -``` - -**Warning** - -If your stop_pulse_id cannot be reached by adding step_pulse_id to -start_pulse_id (start_pulse_id + (n * pulse_id_step) != stop_pulse_id for any n) -it will not be included in the final file. \ No newline at end of file diff --git a/sf-writer/include/BufferBinaryReader.hpp b/sf-writer/include/BufferBinaryReader.hpp deleted file mode 100644 index 0350cba..0000000 --- a/sf-writer/include/BufferBinaryReader.hpp +++ /dev/null @@ -1,28 +0,0 @@ -#ifndef SF_DAQ_BUFFER_BUFFERBINARYREADER_HPP -#define SF_DAQ_BUFFER_BUFFERBINARYREADER_HPP - - -#include - -class BufferBinaryReader { - - const std::string detector_folder_; - const std::string module_name_; - - std::string current_input_file_; - int input_file_fd_; - - void open_file(const std::string& filename); - void close_current_file(); - -public: - BufferBinaryReader(const std::string &detector_folder, - const std::string &module_name); - - ~BufferBinaryReader(); - - void get_block(const uint64_t block_id, BufferBinaryBlock *buffer); -}; - - -#endif //SF_DAQ_BUFFER_BUFFERBINARYREADER_HPP diff --git a/sf-writer/include/ImageAssembler.hpp b/sf-writer/include/ImageAssembler.hpp deleted file mode 100644 index ce98962..0000000 --- a/sf-writer/include/ImageAssembler.hpp +++ /dev/null @@ -1,53 +0,0 @@ -#ifndef SF_DAQ_BUFFER_IMAGEASSEMBLER_HPP -#define SF_DAQ_BUFFER_IMAGEASSEMBLER_HPP - -#include - -#include "buffer_config.hpp" -#include 
"formats.hpp" - -const uint64_t IA_EMPTY_SLOT_VALUE = 0; - -struct ImageMetadataBlock -{ - uint64_t pulse_id[buffer_config::BUFFER_BLOCK_SIZE]; - uint64_t frame_index[buffer_config::BUFFER_BLOCK_SIZE]; - uint32_t daq_rec[buffer_config::BUFFER_BLOCK_SIZE]; - uint8_t is_good_image[buffer_config::BUFFER_BLOCK_SIZE]; - uint64_t block_start_pulse_id; - uint64_t block_stop_pulse_id; -}; - -class ImageAssembler { - const size_t n_modules_; - const size_t image_buffer_slot_n_bytes_; - - char* image_buffer_; - ImageMetadataBlock* meta_buffer_; - ModuleFrame* frame_meta_buffer_; - std::atomic_int* buffer_status_; - std::atomic_uint64_t* buffer_bunch_id_; - - size_t get_data_offset(const uint64_t slot_id, const int i_module); - size_t get_metadata_offset(const uint64_t slot_id, const int i_module); - -public: - ImageAssembler(const size_t n_modules); - - virtual ~ImageAssembler(); - - bool is_slot_free(const uint64_t bunch_id); - bool is_slot_full(const uint64_t bunch_id); - - void process(const uint64_t bunch_id, - const int i_module, - const BufferBinaryBlock* block_buffer); - - void free_slot(const uint64_t bunch_id); - - ImageMetadataBlock* get_metadata_buffer(const uint64_t bunch_id); - char* get_data_buffer(const uint64_t bunch_id); -}; - - -#endif //SF_DAQ_BUFFER_IMAGEASSEMBLER_HPP diff --git a/sf-writer/include/writer_config.hpp b/sf-writer/include/writer_config.hpp deleted file mode 100644 index 6a60d77..0000000 --- a/sf-writer/include/writer_config.hpp +++ /dev/null @@ -1,9 +0,0 @@ -#include - -namespace writer_config -{ - // MS to retry reading from the image assembler. - const size_t ASSEMBLER_RETRY_MS = 5; - // Number of slots in the reconstruction buffer. 
- const size_t WRITER_IA_N_SLOTS = 2; -} \ No newline at end of file diff --git a/sf-writer/src/BufferBinaryReader.cpp b/sf-writer/src/BufferBinaryReader.cpp deleted file mode 100644 index e76c5c5..0000000 --- a/sf-writer/src/BufferBinaryReader.cpp +++ /dev/null @@ -1,107 +0,0 @@ -#include "BufferBinaryReader.hpp" - -#include -#include -#include -#include -#include - -#include "BufferUtils.hpp" -#include "writer_config.hpp" -#include "buffer_config.hpp" - -using namespace std; -using namespace writer_config; -using namespace buffer_config; - -BufferBinaryReader::BufferBinaryReader( - const std::string &detector_folder, - const std::string &module_name) : - detector_folder_(detector_folder), - module_name_(module_name), - current_input_file_(""), - input_file_fd_(-1) -{} - -BufferBinaryReader::~BufferBinaryReader() -{ - close_current_file(); -} - -void BufferBinaryReader::get_block( - const uint64_t block_id, BufferBinaryBlock* buffer) -{ - uint64_t block_start_pulse_id = block_id * BUFFER_BLOCK_SIZE; - auto current_block_file = BufferUtils::get_filename( - detector_folder_, module_name_, block_start_pulse_id); - - if (current_block_file != current_input_file_) { - open_file(current_block_file); - } - - size_t file_start_index = - BufferUtils::get_file_frame_index(block_start_pulse_id); - size_t n_bytes_offset = file_start_index * sizeof(BufferBinaryFormat); - - auto lseek_result = lseek(input_file_fd_, n_bytes_offset, SEEK_SET); - if (lseek_result < 0) { - stringstream err_msg; - - err_msg << "[BufferBinaryReader::get_block]"; - err_msg << " Error while lseek on file "; - err_msg << current_input_file_ << " for n_bytes_offset "; - err_msg << n_bytes_offset << ": " << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } - - auto n_bytes = ::read(input_file_fd_, buffer, - sizeof(BufferBinaryFormat) * BUFFER_BLOCK_SIZE); - - if (n_bytes < sizeof(BufferBinaryFormat)) { - stringstream err_msg; - - err_msg << "[BufferBinaryReader::get_block]"; - err_msg 
<< " Error while reading from file "; - err_msg << current_input_file_ << ": " << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } -} - -void BufferBinaryReader::open_file(const std::string& filename) -{ - close_current_file(); - - input_file_fd_ = open(filename.c_str(), O_RDONLY); - - if (input_file_fd_ < 0) { - stringstream err_msg; - - err_msg << "[BufferBinaryReader::open_file]"; - err_msg << " Cannot open file " << filename << ": "; - err_msg << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } - - current_input_file_ = filename; -} - -void BufferBinaryReader::close_current_file() -{ - if (input_file_fd_ != -1) { - if (close(input_file_fd_) < 0) { - stringstream err_msg; - - err_msg << "[BinaryWriter::close_current_file]"; - err_msg << " Error while closing file " << current_input_file_; - err_msg << ": " << strerror(errno) << endl; - - throw runtime_error(err_msg.str()); - } - - input_file_fd_ = -1; - current_input_file_ = ""; - } -} diff --git a/sf-writer/src/ImageAssembler.cpp b/sf-writer/src/ImageAssembler.cpp deleted file mode 100644 index 1a9a5e6..0000000 --- a/sf-writer/src/ImageAssembler.cpp +++ /dev/null @@ -1,186 +0,0 @@ -#include - -#include "ImageAssembler.hpp" -#include "writer_config.hpp" -#include "buffer_config.hpp" - -using namespace std; -using namespace writer_config; -using namespace buffer_config; - -ImageAssembler::ImageAssembler(const size_t n_modules) : - n_modules_(n_modules), - image_buffer_slot_n_bytes_(BUFFER_BLOCK_SIZE * MODULE_N_BYTES * n_modules_) -{ - image_buffer_ = new char[WRITER_IA_N_SLOTS * image_buffer_slot_n_bytes_]; - meta_buffer_ = new ImageMetadataBlock[WRITER_IA_N_SLOTS]; - frame_meta_buffer_ = - new ModuleFrame[WRITER_IA_N_SLOTS * n_modules * BUFFER_BLOCK_SIZE]; - buffer_status_ = new atomic_int[WRITER_IA_N_SLOTS]; - buffer_bunch_id_ = new atomic_uint64_t[WRITER_IA_N_SLOTS]; - - for (size_t i=0; i < WRITER_IA_N_SLOTS; i++) { - free_slot(i); - } -} - 
-ImageAssembler::~ImageAssembler() -{ - delete[] image_buffer_; - delete[] meta_buffer_; -} - -bool ImageAssembler::is_slot_free(const uint64_t bunch_id) -{ - auto slot_id = bunch_id % WRITER_IA_N_SLOTS; - - uint64_t slot_bunch_id = IA_EMPTY_SLOT_VALUE; - if (buffer_bunch_id_[slot_id].compare_exchange_strong( - slot_bunch_id, bunch_id)) { - return true; - } - - auto is_free = buffer_status_[slot_id].load(memory_order_relaxed) > 0; - return is_free && (slot_bunch_id == bunch_id); -} - -bool ImageAssembler::is_slot_full(const uint64_t bunch_id) -{ - auto slot_id = bunch_id % WRITER_IA_N_SLOTS; - return buffer_status_[slot_id].load(memory_order_relaxed) == 0; -} - -size_t ImageAssembler::get_data_offset( - const uint64_t slot_id, const int i_module) -{ - size_t slot_i_offset = slot_id * image_buffer_slot_n_bytes_; - size_t module_i_offset = i_module * MODULE_N_BYTES; - - return slot_i_offset + module_i_offset; -} - -size_t ImageAssembler::get_metadata_offset( - const uint64_t slot_id, const int i_module) -{ - size_t n_metadata_in_slot = n_modules_ * BUFFER_BLOCK_SIZE; - size_t slot_m_offset = slot_id * n_metadata_in_slot; - size_t module_m_offset = i_module; - - return slot_m_offset + module_m_offset; -} - -void ImageAssembler::process( - const uint64_t bunch_id, - const int i_module, - const BufferBinaryBlock* block_buffer) -{ - const auto slot_id = bunch_id % WRITER_IA_N_SLOTS; - - auto meta_offset = get_metadata_offset(slot_id, i_module); - const auto meta_offset_step = n_modules_; - - auto image_offset = get_data_offset(slot_id, i_module); - const auto image_offset_step = MODULE_N_BYTES * n_modules_; - - for (const auto& frame : block_buffer->frame) { - - memcpy( - &(frame_meta_buffer_[meta_offset]), - &(frame.meta), - sizeof(ModuleFrame)); - - meta_offset += meta_offset_step; - - memcpy( - image_buffer_ + image_offset, - &(frame.data[0]), - MODULE_N_BYTES); - - image_offset += image_offset_step; - } - - buffer_status_[slot_id].fetch_sub(1, memory_order_relaxed); 
-} - -void ImageAssembler::free_slot(const uint64_t bunch_id) -{ - auto slot_id = bunch_id % WRITER_IA_N_SLOTS; - buffer_status_[slot_id].store(n_modules_, memory_order_relaxed); - buffer_bunch_id_[slot_id].store(IA_EMPTY_SLOT_VALUE, memory_order_relaxed); -} - -ImageMetadataBlock* ImageAssembler::get_metadata_buffer(const uint64_t bunch_id) -{ - const auto slot_id = bunch_id % WRITER_IA_N_SLOTS; - - auto& image_pulse_id = meta_buffer_[slot_id].pulse_id; - auto& image_frame_index = meta_buffer_[slot_id].frame_index; - auto& image_daq_rec = meta_buffer_[slot_id].daq_rec; - auto& image_is_good_frame = meta_buffer_[slot_id].is_good_image; - - auto meta_offset = get_metadata_offset(slot_id, 0); - const auto meta_offset_step = 1; - - uint64_t start_pulse_id = bunch_id * BUFFER_BLOCK_SIZE; - meta_buffer_[slot_id].block_start_pulse_id = start_pulse_id; - - uint64_t stop_pulse_id = start_pulse_id + BUFFER_BLOCK_SIZE - 1; - meta_buffer_[slot_id].block_stop_pulse_id = stop_pulse_id; - - for (size_t i_pulse=0; i_pulse < BUFFER_BLOCK_SIZE; i_pulse++) { - - auto is_pulse_init = false; - image_is_good_frame[i_pulse] = 1; - image_pulse_id[i_pulse] = 0; - - for (size_t i_module=0; i_module < n_modules_; i_module++) { - - auto& frame_meta = frame_meta_buffer_[meta_offset]; - auto is_good_frame = - frame_meta.n_recv_packets == JF_N_PACKETS_PER_FRAME; - - if (!is_good_frame) { - image_is_good_frame[i_pulse] = 0; - // TODO: Update meta_offset only once in the loop. 
- meta_offset += meta_offset_step; - continue; - } - - if (!is_pulse_init) { - image_pulse_id[i_pulse] = frame_meta.pulse_id; - image_frame_index[i_pulse] = frame_meta.frame_index; - image_daq_rec[i_pulse] = frame_meta.daq_rec; - - is_pulse_init = true; - } - - if (image_is_good_frame[i_pulse] == 1) { - if (frame_meta.pulse_id != image_pulse_id[i_pulse]) { - image_is_good_frame[i_pulse] = 0; - } - - if (frame_meta.frame_index != image_frame_index[i_pulse]) { - image_is_good_frame[i_pulse] = 0; - } - - if (frame_meta.daq_rec != image_daq_rec[i_pulse]) { - image_is_good_frame[i_pulse] = 0; - } - - if (frame_meta.n_recv_packets != JF_N_PACKETS_PER_FRAME) { - image_is_good_frame[i_pulse] = 0; - } - } - - meta_offset += meta_offset_step; - } - } - - return &(meta_buffer_[slot_id]); -} - -char* ImageAssembler::get_data_buffer(const uint64_t bunch_id) -{ - auto slot_id = bunch_id % WRITER_IA_N_SLOTS; - return image_buffer_ + (slot_id * image_buffer_slot_n_bytes_); -} diff --git a/sf-writer/src/main.cpp b/sf-writer/src/main.cpp deleted file mode 100644 index 643d47f..0000000 --- a/sf-writer/src/main.cpp +++ /dev/null @@ -1,158 +0,0 @@ -#include -#include -#include -#include -#include - -#include "date.h" -#include "zmq.h" -#include "writer_config.hpp" -#include "buffer_config.hpp" -#include "bitshuffle/bitshuffle.h" -#include "JFH5Writer.hpp" -#include "ImageAssembler.hpp" -#include "BufferBinaryReader.hpp" - -using namespace std; -using namespace chrono; -using namespace writer_config; -using namespace buffer_config; - -void read_buffer( - const string detector_folder, - const string module_name, - const int i_module, - const vector& buffer_blocks, - ImageAssembler& image_assembler) -{ - BufferBinaryReader block_reader(detector_folder, module_name); - auto block_buffer = new BufferBinaryBlock(); - - for (uint64_t block_id:buffer_blocks) { - - while(!image_assembler.is_slot_free(block_id)) { - this_thread::sleep_for(chrono::milliseconds(ASSEMBLER_RETRY_MS)); - } - - auto 
start_time = steady_clock::now(); - - block_reader.get_block(block_id, block_buffer); - - auto end_time = steady_clock::now(); - uint64_t read_us_duration = duration_cast( - end_time-start_time).count(); - - start_time = steady_clock::now(); - - image_assembler.process(block_id, i_module, block_buffer); - - end_time = steady_clock::now(); - uint64_t compose_us_duration = duration_cast( - end_time-start_time).count(); - - cout << "sf_writer:avg_read_us "; - cout << read_us_duration / BUFFER_BLOCK_SIZE << endl; - cout << "sf_writer:avg_assemble_us "; - cout << compose_us_duration / BUFFER_BLOCK_SIZE << endl; - } - - delete block_buffer; -} - -int main (int argc, char *argv[]) -{ - if (argc != 7) { - cout << endl; - cout << "Usage: sf_writer [output_file] [detector_folder] [n_modules]"; - cout << " [start_pulse_id] [stop_pulse_id] [pulse_id_step]"; - cout << endl; - cout << "\toutput_file: Complete path to the output file." << endl; - cout << "\tdetector_folder: Absolute path to detector buffer." << endl; - cout << "\tn_modules: number of modules" << endl; - cout << "\tstart_pulse_id: Start pulse_id of retrieval." << endl; - cout << "\tstop_pulse_id: Stop pulse_id of retrieval." << endl; - cout << "\tpulse_id_step: 1==100Hz, 2==50hz, 4==25Hz.." << endl; - cout << endl; - - exit(-1); - } - - string output_file = string(argv[1]); - const string detector_folder = string(argv[2]); - size_t n_modules = atoi(argv[3]); - uint64_t start_pulse_id = (uint64_t) atoll(argv[4]); - uint64_t stop_pulse_id = (uint64_t) atoll(argv[5]); - int pulse_id_step = atoi(argv[6]); - - // Align start (up) and stop(down) pulse_id with pulse_id_step. 
- if (start_pulse_id % pulse_id_step != 0) { - start_pulse_id += pulse_id_step - (start_pulse_id % pulse_id_step); - } - if (stop_pulse_id % pulse_id_step != 0) { - stop_pulse_id -= (start_pulse_id % pulse_id_step); - } - - uint64_t start_block = start_pulse_id / BUFFER_BLOCK_SIZE; - uint64_t stop_block = stop_pulse_id / BUFFER_BLOCK_SIZE; - - // Generate list of buffer blocks that need to be loaded. - std::vector buffer_blocks; - for (uint64_t i_block=start_block; i_block <= stop_block; i_block++) { - buffer_blocks.push_back(i_block); - } - - ImageAssembler image_assembler(n_modules); - - std::vector reading_threads(n_modules); - for (size_t i_module=0; i_module( - end_time-start_time).count(); - - image_assembler.free_slot(block_id); - - cout << "sf_writer:avg_write_us "; - cout << write_us_duration / BUFFER_BLOCK_SIZE << endl; - } - - for (auto& reading_thread : reading_threads) { - if (reading_thread.joinable()) { - reading_thread.join(); - } - } - - return 0; -} diff --git a/sf-writer/test/CMakeLists.txt b/sf-writer/test/CMakeLists.txt deleted file mode 100644 index 2b9f4f0..0000000 --- a/sf-writer/test/CMakeLists.txt +++ /dev/null @@ -1,10 +0,0 @@ -add_executable(sf-writer-tests main.cpp) - -target_link_libraries(sf-writer-tests - sf-writer-lib - hdf5 - hdf5_hl - hdf5_cpp - zmq - gtest - ) diff --git a/sf-writer/test/main.cpp b/sf-writer/test/main.cpp deleted file mode 100644 index 65c6feb..0000000 --- a/sf-writer/test/main.cpp +++ /dev/null @@ -1,10 +0,0 @@ -#include "gtest/gtest.h" -#include "test_JFH5Writer.cpp" -#include "test_ImageAssembler.cpp" - -using namespace std; - -int main(int argc, char **argv) { - ::testing::InitGoogleTest(&argc, argv); - return RUN_ALL_TESTS(); -} diff --git a/sf-writer/test/mock/data.hpp b/sf-writer/test/mock/data.hpp deleted file mode 100644 index 0588de5..0000000 --- a/sf-writer/test/mock/data.hpp +++ /dev/null @@ -1,72 +0,0 @@ -#ifndef SF_DAQ_BUFFER_DATA_HPP -#define SF_DAQ_BUFFER_DATA_HPP - -#include -#include - -#include 
"buffer_config.hpp" - -auto get_test_block_metadata( - const uint64_t start_pulse_id, - const uint64_t stop_pulse_id, - const int pulse_id_step) -{ - using namespace std; - using namespace buffer_config; - - auto metadata = make_shared(); - - uint64_t block_id = start_pulse_id / BUFFER_BLOCK_SIZE; - - metadata->block_start_pulse_id = block_id * BUFFER_BLOCK_SIZE; - metadata->block_stop_pulse_id = - metadata->block_start_pulse_id + BUFFER_BLOCK_SIZE - 1; - - if (metadata->block_stop_pulse_id < stop_pulse_id) { - throw runtime_error("stop_pulse_id in next block"); - } - - auto offset = start_pulse_id - metadata->block_start_pulse_id; - - for (uint64_t pulse_id = start_pulse_id; - pulse_id <= stop_pulse_id; - pulse_id++, offset++) { - - if (pulse_id % pulse_id_step != 0) { - metadata->is_good_image[offset] = 0; - continue; - } - - metadata->pulse_id[offset] = pulse_id; - metadata->frame_index[offset] = pulse_id + 10; - metadata->daq_rec[offset] = pulse_id + 100; - metadata->is_good_image[offset] = 1; - } - - return metadata; -} - -auto get_test_block_data(const size_t n_modules) -{ - using namespace std; - using namespace buffer_config; - - auto image_buffer = make_unique( - MODULE_N_PIXELS * n_modules * BUFFER_BLOCK_SIZE); - - for (int i_block=0; i_block<=BUFFER_BLOCK_SIZE; i_block++) { - for (int i_module=0; i_module -#include "buffer_config.hpp" -#include "zmq.h" -#include -#include -#include -#include -#include "WriterH5Writer.hpp" - -using namespace std; -using namespace core_buffer; - - -int main (int argc, char *argv[]) -{ - if (argc != 4) { - cout << endl; - cout << "Usage: sf_writer "; - cout << " [output_file] [start_pulse_id] [stop_pulse_id]"; - cout << endl; - cout << "\toutput_file: Complete path to the output file." << endl; - cout << "\tstart_pulse_id: Start pulse_id of retrieval." << endl; - cout << "\tstop_pulse_id: Stop pulse_id of retrieval." 
<< endl; - cout << endl; - - exit(-1); - } - - string output_file = string(argv[1]); - uint64_t start_pulse_id = (uint64_t) atoll(argv[2]); - uint64_t stop_pulse_id = (uint64_t) atoll(argv[3]); - - size_t n_modules = 32; - - size_t n_frames = stop_pulse_id - start_pulse_id; - WriterH5Writer writer(output_file, n_frames, n_modules); - - // TODO: Remove stats trash. - int i_write = 0; - size_t total_ms = 0; - size_t max_ms = 0; - size_t min_ms = 10000; // 10 seconds should be a safe first value. - - auto start_time = chrono::steady_clock::now(); - - auto metadata = make_shared(); - auto data = make_unique(MODULE_N_BYTES*n_modules); - - auto current_pulse_id = start_pulse_id; - while (current_pulse_id <= stop_pulse_id) { - - writer.write(metadata.get(), data.get()); - current_pulse_id++; - - i_write++; - - auto end_time = chrono::steady_clock::now(); - - // TODO: Some poor statistics. - - auto ms_duration = chrono::duration_cast( - end_time-start_time).count(); - total_ms += ms_duration; - - if (ms_duration > max_ms) { - max_ms = ms_duration; - } - - if (ms_duration < min_ms) { - min_ms = ms_duration; - } - - if (i_write==100) { - cout << "avg_write_ms " << total_ms / 100; - cout << " min_write_ms " << min_ms; - cout << " max_write_ms " << max_ms << endl; - - i_write = 0; - total_ms = 0; - max_ms = 0; - min_ms = 0; - } - - start_time = chrono::steady_clock::now(); - } - - writer.close_file(); - - return 0; -} diff --git a/sf-writer/test/test_ImageAssembler.cpp b/sf-writer/test/test_ImageAssembler.cpp deleted file mode 100644 index 97bdbb3..0000000 --- a/sf-writer/test/test_ImageAssembler.cpp +++ /dev/null @@ -1,90 +0,0 @@ -#include - -#include "ImageAssembler.hpp" -#include "gtest/gtest.h" - -using namespace std; -using namespace buffer_config; - -TEST(ImageAssembler, basic_interaction) -{ - size_t n_modules = 3; - uint64_t bunch_id = 0; - - ImageAssembler assembler(n_modules); - - ASSERT_EQ(assembler.is_slot_free(bunch_id), true); - - auto buffer_block = 
make_unique(); - auto buffer_ptr = buffer_block.get(); - - for (size_t i_module=0; i_module < n_modules; i_module++) { - assembler.process(bunch_id, i_module, buffer_ptr); - } - - ASSERT_EQ(assembler.is_slot_full(bunch_id), true); - - auto metadata = assembler.get_metadata_buffer(bunch_id); - auto data = assembler.get_data_buffer(bunch_id); - - assembler.free_slot(bunch_id); - ASSERT_EQ(assembler.is_slot_free(bunch_id), true); - - for (size_t i_pulse = 0; i_pulse < BUFFER_BLOCK_SIZE; i_pulse++) { - ASSERT_EQ(metadata->is_good_image[i_pulse], 0); - } -} - -TEST(ImageAssembler, reconstruction) -{ - size_t n_modules = 2; - uint64_t bunch_id = 0; - - ImageAssembler assembler(n_modules); - - ASSERT_EQ(assembler.is_slot_free(bunch_id), true); - - auto buffer_block = make_unique(); - auto buffer_ptr = buffer_block.get(); - - for (size_t i_module=0; i_module < n_modules; i_module++) { - - for (size_t i_pulse=0; i_pulse < BUFFER_BLOCK_SIZE; i_pulse++) { - auto& frame_meta = buffer_block->frame[i_pulse].meta; - - frame_meta.pulse_id = 100 + i_pulse; - frame_meta.daq_rec = 1000 + i_pulse; - frame_meta.frame_index = 10000 + i_pulse; - frame_meta.n_recv_packets = JF_N_PACKETS_PER_FRAME; - - for (size_t i_pixel=0; i_pixel < MODULE_N_PIXELS; i_pixel++) { - buffer_block->frame[i_pulse].data[i_pixel] = - (i_module * 10) + (i_pixel % 100); - } - } - - assembler.process(bunch_id, i_module, buffer_ptr); - } - - ASSERT_EQ(assembler.is_slot_full(bunch_id), true); - - auto metadata = assembler.get_metadata_buffer(bunch_id); - auto data = assembler.get_data_buffer(bunch_id); - - assembler.free_slot(bunch_id); - ASSERT_EQ(assembler.is_slot_free(bunch_id), true); - - ASSERT_EQ(metadata->block_start_pulse_id, 0); - ASSERT_EQ(metadata->block_stop_pulse_id, BUFFER_BLOCK_SIZE-1); - - for (size_t i_pulse = 0; i_pulse < BUFFER_BLOCK_SIZE; i_pulse++) { - ASSERT_EQ(metadata->pulse_id[i_pulse], 100 + i_pulse); - ASSERT_EQ(metadata->daq_rec[i_pulse], 1000 + i_pulse); - 
ASSERT_EQ(metadata->frame_index[i_pulse], 10000 + i_pulse); - ASSERT_EQ(metadata->is_good_image[i_pulse], 1); - - for (size_t i_module=0; i_module < n_modules; i_module++) { - // TODO: Check assembled image. - } - } -} diff --git a/sf-writer/test/test_JFH5Writer.cpp b/sf-writer/test/test_JFH5Writer.cpp deleted file mode 100644 index 2a2b5a9..0000000 --- a/sf-writer/test/test_JFH5Writer.cpp +++ /dev/null @@ -1,254 +0,0 @@ -#include - -#include "JFH5Writer.hpp" -#include "gtest/gtest.h" -#include "bitshuffle/bitshuffle.h" -#include "mock/data.hpp" - -using namespace std; -using namespace buffer_config; - -TEST(JFH5Writer, basic_interaction) -{ - size_t n_modules = 2; - uint64_t start_pulse_id = 1; - uint64_t stop_pulse_id = 5; - - auto data = make_unique(n_modules*MODULE_N_BYTES*BUFFER_BLOCK_SIZE); - auto metadata = make_shared(); - - // Needed by writer. - metadata->block_start_pulse_id = 0; - metadata->block_stop_pulse_id = BUFFER_BLOCK_SIZE - 1; - - JFH5Writer writer("ignore.h5", "detector", - n_modules, start_pulse_id, stop_pulse_id, 1); - writer.write(metadata.get(), data.get()); -} - -TEST(JFH5Writer, test_writing) -{ - size_t n_modules = 2; - uint64_t start_pulse_id = 5; - uint64_t stop_pulse_id = 10; - auto n_images = stop_pulse_id - start_pulse_id + 1; - - auto meta = get_test_block_metadata(start_pulse_id, stop_pulse_id, 1); - auto data = get_test_block_data(n_modules); - - string detector_name = "detector"; - - // The writer closes the file on destruction. 
- { - JFH5Writer writer( - "ignore.h5", detector_name, - n_modules, start_pulse_id, stop_pulse_id, 1); - writer.write(meta.get(), (char*)(&data[0])); - } - - H5::H5File reader("ignore.h5", H5F_ACC_RDONLY); - auto image_dataset = reader.openDataSet("/data/detector/data"); - image_dataset.read(&data[0], H5::PredType::NATIVE_UINT16); - - for (int i_image=0; i_image < n_images; i_image++) { - for (int i_module=0; i_module(n_images); - auto pulse_id_dataset = reader.openDataSet("/data/detector/pulse_id"); - pulse_id_dataset.read(&pulse_id_data[0], H5::PredType::NATIVE_UINT64); - - auto frame_index_data = make_unique(n_images); - auto frame_index_dataset = reader.openDataSet("/data/detector/frame_index"); - frame_index_dataset.read(&frame_index_data[0], H5::PredType::NATIVE_UINT64); - - auto daq_rec_data = make_unique(n_images); - auto daq_rec_dataset = reader.openDataSet("/data/detector/daq_rec"); - daq_rec_dataset.read(&daq_rec_data[0], H5::PredType::NATIVE_UINT32); - - auto is_good_frame_data = make_unique(n_images); - auto is_good_frame_dataset = - reader.openDataSet("/data/detector/is_good_frame"); - is_good_frame_dataset.read( - &is_good_frame_data[0], H5::PredType::NATIVE_UINT8); - - auto name_dataset = reader.openDataSet("/general/detector_name"); - string read_detector_name; - name_dataset.read(read_detector_name, name_dataset.getDataType()); - - ASSERT_EQ(detector_name, read_detector_name); - - for (uint64_t pulse_id=start_pulse_id; - pulse_id<=stop_pulse_id; - pulse_id++) { - - ASSERT_EQ(pulse_id_data[pulse_id - start_pulse_id], pulse_id); - ASSERT_EQ(frame_index_data[pulse_id - start_pulse_id], pulse_id + 10); - ASSERT_EQ(daq_rec_data[pulse_id - start_pulse_id], pulse_id + 100); - ASSERT_EQ(is_good_frame_data[pulse_id - start_pulse_id], 1); - } -} - -TEST(JFH5Writer, test_step_pulse_id) -{ - // Start pulse id (5) larger than stop pulse id (4). 
- ASSERT_THROW(JFH5Writer writer("ignore.h5", "d", 1 , 5, 4, 1), - runtime_error); - - // Start pulse id (5) is equal to stop pulse id (5). - ASSERT_NO_THROW(JFH5Writer writer("ignore.h5", "d", 1, 5, 5, 1)); - - // The step is exactly on start nad stop pulse id. - ASSERT_NO_THROW(JFH5Writer writer("ignore.h5", "d", 1, 5, 5, 5)); - - // No pulses in given range with step = 10 - ASSERT_THROW(JFH5Writer writer("ignore.h5", "d", 1, 1, 9, 10), - runtime_error); - - // Stop pulse id is divisible by step, but start is not. - ASSERT_THROW(JFH5Writer writer("ignore.h5", "d", 1, 5, 10, 10), - runtime_error); - - // Start pulse id is divisible by step, but stop is not. - ASSERT_THROW(JFH5Writer writer("ignore.h5", "d", 1, 10, 19, 10), - runtime_error); - - // Should be ok. - ASSERT_NO_THROW(JFH5Writer("ignore.h5", "d", 1, 1234, 1234, 1)); - // Should be ok. - ASSERT_NO_THROW(JFH5Writer("ignore.h5", "d", 1, 1234, 4567, 1)); - // Should be ok. - ASSERT_NO_THROW(JFH5Writer("ignore.h5", "d", 1, 4, 4, 4)); - - // stop smaller than start. - ASSERT_THROW(JFH5Writer("ignore.h5", "d", 1, 1234, 1233, 1), - runtime_error); - // step is not valid for 100Hz. - ASSERT_THROW(JFH5Writer("ignore.h5", "d", 1, 1234, 1234, 3), - runtime_error); - // start not divisible by step. - ASSERT_THROW(JFH5Writer("ignore.h5", "d", 1, 10, 10, 4), - runtime_error); - // stop not divisible by step - ASSERT_THROW(JFH5Writer("ignore.h5", "d", 1, 8, 10, 4), - runtime_error); -} - -void test_writing_with_step( - uint64_t start_pulse_id, uint64_t stop_pulse_id, size_t step) -{ - size_t n_modules = 3; - - size_t n_images = 1; - n_images += (stop_pulse_id / step); - n_images -= start_pulse_id / step; - - auto meta = get_test_block_metadata(start_pulse_id, stop_pulse_id, step); - auto data = get_test_block_data(n_modules); - - // Verify the metadata has the layout we want to test (50Hz). 
- for (size_t i_pulse=0; i_pulsepulse_id[i_pulse], 500 + i_pulse); - } else { - ASSERT_EQ(meta->pulse_id[i_pulse], 0); - } - } - - string path_root = "/path/to/"; - string expected_detector_name = "detector"; - - // The writer closes the file on destruction. - { - JFH5Writer writer( - "ignore.h5", path_root + expected_detector_name, - n_modules, start_pulse_id, stop_pulse_id, step); - writer.write(meta.get(), (char*)(&data[0])); - } - - H5::H5File reader("ignore.h5", H5F_ACC_RDONLY); - auto image_dataset = reader.openDataSet("/data/detector/data"); - image_dataset.read(&data[0], H5::PredType::NATIVE_UINT16); - - hsize_t dims[3]; - image_dataset.getSpace().getSimpleExtentDims(dims); - ASSERT_EQ(dims[0], n_images); - ASSERT_EQ(dims[1], n_modules * MODULE_Y_SIZE); - ASSERT_EQ(dims[2], MODULE_X_SIZE); - - auto pulse_id_data = make_unique(n_images); - auto pulse_id_dataset = reader.openDataSet("/data/detector/pulse_id"); - pulse_id_dataset.read(&pulse_id_data[0], H5::PredType::NATIVE_UINT64); - pulse_id_dataset.getSpace().getSimpleExtentDims(dims); - ASSERT_EQ(dims[0], n_images); - ASSERT_EQ(dims[1], 1); - - auto frame_index_data = make_unique(n_images); - auto frame_index_dataset = reader.openDataSet("/data/detector/frame_index"); - frame_index_dataset.read(&frame_index_data[0], H5::PredType::NATIVE_UINT64); - frame_index_dataset.getSpace().getSimpleExtentDims(dims); - ASSERT_EQ(dims[0], n_images); - ASSERT_EQ(dims[1], 1); - - auto daq_rec_data = make_unique(n_images); - auto daq_rec_dataset = reader.openDataSet("/data/detector/daq_rec"); - daq_rec_dataset.read(&daq_rec_data[0], H5::PredType::NATIVE_UINT32); - daq_rec_dataset.getSpace().getSimpleExtentDims(dims); - ASSERT_EQ(dims[0], n_images); - ASSERT_EQ(dims[1], 1); - - auto is_good_frame_data = make_unique(n_images); - auto is_good_frame_dataset = - reader.openDataSet("/data/detector/is_good_frame"); - is_good_frame_dataset.read( - &is_good_frame_data[0], H5::PredType::NATIVE_UINT8); - 
is_good_frame_dataset.getSpace().getSimpleExtentDims(dims); - ASSERT_EQ(dims[0], n_images); - ASSERT_EQ(dims[1], 1); - - auto name_dataset = reader.openDataSet("/general/detector_name"); - string read_detector_name; - name_dataset.read(read_detector_name, name_dataset.getDataType()); - - ASSERT_EQ(expected_detector_name, read_detector_name); - - uint64_t i_pulse = 0; - for (uint64_t pulse_id=start_pulse_id; - pulse_id<=stop_pulse_id; - pulse_id++) { - - if (pulse_id % step != 0) { - continue; - } - - ASSERT_EQ(pulse_id_data[i_pulse], pulse_id); - ASSERT_EQ(frame_index_data[i_pulse], pulse_id + 10); - ASSERT_EQ(daq_rec_data[i_pulse], pulse_id + 100); - ASSERT_EQ(is_good_frame_data[i_pulse], 1); - - i_pulse++; - } -} - -TEST(JFH5Writer, test_writing_with_step) -{ - // TODO: Write with any number of steps. - // 100Hz - test_writing_with_step(500, 599, 1); - // 50Hz - test_writing_with_step(500, 598, 2); - // 25Hz - test_writing_with_step(500, 596, 4); - // 10Hz - test_writing_with_step(500, 590, 10); - // 1Hz - test_writing_with_step(500, 500, 100); -} From 11ba1c399c58eddcd9b490ab447c4487a6f050e1 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Wed, 17 Feb 2021 12:47:23 +0100 Subject: [PATCH 40/61] Moved H5 writer to new project --- {sf-writer => jf-live-writer}/include/JFH5Writer.hpp | 0 {sf-writer => jf-live-writer}/src/JFH5Writer.cpp | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename {sf-writer => jf-live-writer}/include/JFH5Writer.hpp (100%) rename {sf-writer => jf-live-writer}/src/JFH5Writer.cpp (100%) diff --git a/sf-writer/include/JFH5Writer.hpp b/jf-live-writer/include/JFH5Writer.hpp similarity index 100% rename from sf-writer/include/JFH5Writer.hpp rename to jf-live-writer/include/JFH5Writer.hpp diff --git a/sf-writer/src/JFH5Writer.cpp b/jf-live-writer/src/JFH5Writer.cpp similarity index 100% rename from sf-writer/src/JFH5Writer.cpp rename to jf-live-writer/src/JFH5Writer.cpp From cdfff368248ffb1e64cd1dce6faeacb3673f00f7 Mon Sep 17 00:00:00 2001 
From: Andrej Babic Date: Mon, 22 Feb 2021 10:45:23 +0100 Subject: [PATCH 41/61] Broker agent stream format --- jf-live-writer/include/broker_format.hpp | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 jf-live-writer/include/broker_format.hpp diff --git a/jf-live-writer/include/broker_format.hpp b/jf-live-writer/include/broker_format.hpp new file mode 100644 index 0000000..9943d9c --- /dev/null +++ b/jf-live-writer/include/broker_format.hpp @@ -0,0 +1,21 @@ +#ifndef SF_DAQ_BUFFER_BROKER_FORMAT_HPP +#define SF_DAQ_BUFFER_BROKER_FORMAT_HPP + +#include "formats.hpp" + +const static uint8_t OP_CONTINUE = 0; +const static uint8_t OP_START = 1; +const static uint8_t OP_END = 2; + +#pragma pack(push) +#pragma pack(1) +struct StoreStream { + uint8_t op_code; + uint32_t i_image; + uint32_t n_images; + int64_t run_id; + + ImageMetadata image_metadata; +}; +#pragma pack(pop) +#endif //SF_DAQ_BUFFER_BROKER_FORMAT_HPP From 29d3e4c1cb09244c8b6a4b0a41808839eba0bada Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Wed, 24 Feb 2021 08:56:29 +0100 Subject: [PATCH 42/61] Rewrite the writer main function --- jf-live-writer/src/main.cpp | 50 +++++++++++++++++++++++++++++-------- 1 file changed, 39 insertions(+), 11 deletions(-) diff --git a/jf-live-writer/src/main.cpp b/jf-live-writer/src/main.cpp index aa47b8c..417c919 100644 --- a/jf-live-writer/src/main.cpp +++ b/jf-live-writer/src/main.cpp @@ -3,9 +3,11 @@ #include #include #include -#include #include "live_writer_config.hpp" #include "WriterStats.hpp" +#include "broker_format.hpp" +#include +#include using namespace std; @@ -17,38 +19,64 @@ int main (int argc, char *argv[]) if (argc != 3) { cout << endl; cout << "Usage: jf_live_writer [detector_json_filename]" - " [writer_id]" << endl; + " [bits_per_pixel]" << endl; cout << "\tdetector_json_filename: detector config file path." << endl; - cout << "\twriter_id: Index of this writer instance." 
<< endl; + cout << "\tbits_per_pixel: Number of bits in each pixel." << endl; cout << endl; exit(-1); } - auto config = BufferUtils::read_json_config(string(argv[1])); - const int writer_id = atoi(argv[2]); + auto const config = BufferUtils::read_json_config(string(argv[1])); + auto const bits_per_pixel = atoi(argv[2]); + + MPI_Init(NULL, NULL); + + int n_writers; + MPI_Comm_size(MPI_COMM_WORLD, &n_writers); + + int i_writer; + MPI_Comm_size(MPI_COMM_WORLD, &i_writer); auto ctx = zmq_ctx_new(); zmq_ctx_set(ctx, ZMQ_IO_THREADS, LIVE_ZMQ_IO_THREADS); auto receiver = BufferUtils::connect_socket( - ctx, config.detector_name, "assembler"); + ctx, config.detector_name, "broker-agent"); RamBuffer ram_buffer(config.detector_name, config.n_modules); - const uint64_t image_n_bytes = config.n_modules * MODULE_N_BYTES; - ImageBinaryWriter writer(config.detector_name, image_n_bytes); + const uint64_t image_n_bytes = + config.image_y_size * config.image_x_size * bits_per_pixel; + JFH5Writer writer(config); WriterStats stats(config.detector_name, STATS_MODULO, image_n_bytes); - ImageMetadata meta = {}; + StoreStream meta = {}; while (true) { zmq_recv(receiver, &meta, sizeof(meta), 0); - char* data = ram_buffer.read_image(meta.pulse_id); + + if (meta.op_code == OP_START) { + writer.open_run(meta.run_id, meta.n_images); + continue; + } + + if (meta.op_code == OP_END) { + writer.close_run(meta.run_id); + continue; + } + + if (meta.i_image % n_writers != i_writer) { + continue; + } + + char* data = ram_buffer.read_image(meta.image_metadata.pulse_id); stats.start_image_write(); - writer.write(meta, data); + writer.write(meta.run_id, meta.image_metadata, data); stats.end_image_write(); } + + MPI_Finalize(); } From 3c9a9d8bce70bf906a73809a84f3ffbcf7095036 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Wed, 24 Feb 2021 08:56:54 +0100 Subject: [PATCH 43/61] Add output symlink name to writer config --- jf-live-writer/include/live_writer_config.hpp | 2 ++ 1 file changed, 2 insertions(+) 
diff --git a/jf-live-writer/include/live_writer_config.hpp b/jf-live-writer/include/live_writer_config.hpp index 76d9b05..d47c531 100644 --- a/jf-live-writer/include/live_writer_config.hpp +++ b/jf-live-writer/include/live_writer_config.hpp @@ -4,4 +4,6 @@ namespace live_writer_config { // N of IO threads to receive data from modules. const int LIVE_ZMQ_IO_THREADS = 1; + + const std::string OUTPUT_FOLDER_SYMLINK = "OUTPUT/" } \ No newline at end of file From 1ac041f51ce5787f033a6a6292447ec8a84b7be4 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Wed, 24 Feb 2021 08:57:15 +0100 Subject: [PATCH 44/61] Make the live writer an MPI build --- jf-live-writer/CMakeLists.txt | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/jf-live-writer/CMakeLists.txt b/jf-live-writer/CMakeLists.txt index f96af5a..b6f1045 100644 --- a/jf-live-writer/CMakeLists.txt +++ b/jf-live-writer/CMakeLists.txt @@ -1,17 +1,29 @@ +find_package(MPI REQUIRED) +# Because of openmpi. +add_definitions(-DOMPI_SKIP_MPICXX) + file(GLOB SOURCES src/*.cpp) add_library(jf-live-writer-lib STATIC ${SOURCES}) -target_include_directories(jf-live-writer-lib PUBLIC include/) +target_include_directories(jf-live-writer-lib + PUBLIC include/ + SYSTEM ${MPI_INCLUDE_PATH}) + target_link_libraries(jf-live-writer-lib external - core-buffer-lib) + core-buffer-lib + ${MPI_LIBRARIES} + ) add_executable(jf-live-writer src/main.cpp) set_target_properties(jf-live-writer PROPERTIES OUTPUT_NAME jf_live_writer) target_link_libraries(jf-live-writer jf-live-writer-lib zmq + hdf5 + hdf5_hl + hdf5_cpp rt ) From b2694fb8495ab424581a9f8bb299894b0e483792 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Wed, 24 Feb 2021 08:57:34 +0100 Subject: [PATCH 45/61] Add image size to detector config --- core-buffer/include/BufferUtils.hpp | 2 ++ core-buffer/src/BufferUtils.cpp | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/core-buffer/include/BufferUtils.hpp 
b/core-buffer/include/BufferUtils.hpp index 1403bd0..a1219ff 100644 --- a/core-buffer/include/BufferUtils.hpp +++ b/core-buffer/include/BufferUtils.hpp @@ -19,6 +19,8 @@ namespace BufferUtils const int n_modules; const int start_udp_port; const std::string buffer_folder; + const int image_y_size; + const int image_x_size; }; diff --git a/core-buffer/src/BufferUtils.cpp b/core-buffer/src/BufferUtils.cpp index c763966..be85b4b 100644 --- a/core-buffer/src/BufferUtils.cpp +++ b/core-buffer/src/BufferUtils.cpp @@ -163,6 +163,8 @@ BufferUtils::DetectorConfig BufferUtils::read_json_config( config_parameters["detector_name"].GetString(), config_parameters["n_modules"].GetInt(), config_parameters["start_udp_port"].GetInt(), - config_parameters["buffer_folder"].GetString() + config_parameters["buffer_folder"].GetString(), + config_parameters["image_y_size"].GetInt(), + config_parameters["image_x_size"].GetInt() }; } From fb9ec376f68923b1b89cad460669b57d778bf3ef Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Wed, 24 Feb 2021 09:01:11 +0100 Subject: [PATCH 46/61] Add readme stub to H5 Writer --- jf-live-writer/README.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 jf-live-writer/README.md diff --git a/jf-live-writer/README.md b/jf-live-writer/README.md new file mode 100644 index 0000000..c1463f1 --- /dev/null +++ b/jf-live-writer/README.md @@ -0,0 +1,12 @@ +# jf-live-writer + +## Install PHDF5 manually +``` +wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.7/src/hdf5-1.10.7.tar.gz +tar -xzf hdf5-1.10.7.tar.gz +cd hdf5-1.10.7 +./configure --enable-parallel +make +make install +``` + From 17485fbb6b25686554ad12be9e56b48b031db5c8 Mon Sep 17 00:00:00 2001 From: Ivan Usov Date: Wed, 24 Feb 2021 10:31:19 +0100 Subject: [PATCH 47/61] Add postprocess_raw.py --- scripts/postprocess_raw.py | 140 +++++++++++++++++++++++++++++++++++++ 1 file changed, 140 insertions(+) create mode 100644 scripts/postprocess_raw.py diff --git 
a/scripts/postprocess_raw.py b/scripts/postprocess_raw.py new file mode 100644 index 0000000..3d3a285 --- /dev/null +++ b/scripts/postprocess_raw.py @@ -0,0 +1,140 @@ +import os +import struct + +import bitshuffle +import h5py +import numpy as np +from bitshuffle.h5 import H5_COMPRESS_LZ4, H5FILTER # pylint: disable=no-name-in-module + +# bitshuffle hdf5 filter params +BLOCK_SIZE = 2048 +compargs = {"compression": H5FILTER, "compression_opts": (BLOCK_SIZE, H5_COMPRESS_LZ4)} +# limit bitshuffle omp to a single thread +# a better fix would be to use bitshuffle compiled without omp support +os.environ["OMP_NUM_THREADS"] = "1" + +DTYPE = np.dtype(np.uint16) +DTYPE_SIZE = DTYPE.itemsize + +MODULE_SIZE_X = 1024 +MODULE_SIZE_Y = 512 + + +def postprocess_raw( + source, dest, disabled_modules=(), index=None, compression=False, batch_size=100 +): + # a function for 'visititems' should have the args (name, object) + def _visititems(name, obj): + if isinstance(obj, h5py.Group): + h5_dest.create_group(name) + + elif isinstance(obj, h5py.Dataset): + dset_source = h5_source[name] + + # process all but the raw data + if name != data_dset: + if name.startswith("data"): + # datasets with data per image, so indexing should be applied + if index is None: + data = dset_source[:] + else: + data = dset_source[index, :] + + args = {"shape": data.shape} + h5_dest.create_dataset_like(name, dset_source, data=data, **args) + else: + h5_dest.create_dataset_like(name, dset_source, data=dset_source) + + else: + raise TypeError(f"Unknown h5py object type {obj}") + + # copy group/dataset attributes if it's not a dataset with the actual data + if name != data_dset: + for key, value in h5_source[name].attrs.items(): + h5_dest[name].attrs[key] = value + + with h5py.File(source, "r") as h5_source, h5py.File(dest, "w") as h5_dest: + detector_name = h5_source["general/detector_name"][()].decode() + data_dset = f"data/{detector_name}/data" + + # traverse the source file and copy/index all datasets, 
except the raw data + h5_source.visititems(_visititems) + + # now process the raw data + dset = h5_source[data_dset] + + args = dict() + if index is None: + n_images = dset.shape[0] + else: + index = np.array(index) + n_images = len(index) + + n_modules = dset.shape[1] // MODULE_SIZE_Y + out_shape = (MODULE_SIZE_Y * (n_modules - len(disabled_modules)), MODULE_SIZE_X) + + args["shape"] = (n_images, *out_shape) + args["maxshape"] = (n_images, *out_shape) + args["chunks"] = (1, *out_shape) + + if compression: + args.update(compargs) + + h5_dest.create_dataset_like(data_dset, dset, **args) + + # calculate and save module_map + module_map = [] + tmp = 0 + for ind in range(n_modules): + if ind in disabled_modules: + module_map.append(-1) + else: + module_map.append(tmp) + tmp += 1 + + h5_dest[f"data/{detector_name}/module_map"] = np.tile(module_map, (n_images, 1)) + + # prepare buffers to be reused for every batch + read_buffer = np.empty((batch_size, *dset.shape[1:]), dtype=DTYPE) + out_buffer = np.zeros((batch_size, *out_shape), dtype=DTYPE) + + # process and write data in batches + for batch_start_ind in range(0, n_images, batch_size): + batch_range = range(batch_start_ind, min(batch_start_ind + batch_size, n_images)) + + if index is None: + batch_ind = np.array(batch_range) + else: + batch_ind = index[batch_range] + + # TODO: avoid unnecessary buffers + read_buffer_view = read_buffer[: len(batch_ind)] + out_buffer_view = out_buffer[: len(batch_ind)] + + # Avoid a stride-bottleneck, see https://github.com/h5py/h5py/issues/977 + if np.sum(np.diff(batch_ind)) == len(batch_ind) - 1: + # consecutive index values + dset.read_direct(read_buffer_view, source_sel=np.s_[batch_ind]) + else: + for i, j in enumerate(batch_ind): + dset.read_direct(read_buffer_view, source_sel=np.s_[j], dest_sel=np.s_[i]) + + for i, m in enumerate(module_map): + if m == -1: + continue + + read_slice = read_buffer_view[:, i * MODULE_SIZE_Y : (i + 1) * MODULE_SIZE_Y, :] + out_slice = 
out_buffer_view[:, m * MODULE_SIZE_Y : (m + 1) * MODULE_SIZE_Y, :] + out_slice[:] = read_slice + + bytes_num_elem = struct.pack(">q", out_shape[0] * out_shape[1] * DTYPE_SIZE) + bytes_block_size = struct.pack(">i", BLOCK_SIZE * DTYPE_SIZE) + header = bytes_num_elem + bytes_block_size + + for pos, im in zip(batch_range, out_buffer_view): + if compression: + byte_array = header + bitshuffle.compress_lz4(im, BLOCK_SIZE).tobytes() + else: + byte_array = im.tobytes() + + h5_dest[data_dset].id.write_direct_chunk((pos, 0, 0), byte_array) From d8706f65daeb9184c14056b6e2b8802e68a2f21f Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Wed, 24 Feb 2021 11:04:36 +0100 Subject: [PATCH 48/61] Updated HDF5 version to latest --- jf-live-writer/README.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/jf-live-writer/README.md b/jf-live-writer/README.md index c1463f1..ce4349c 100644 --- a/jf-live-writer/README.md +++ b/jf-live-writer/README.md @@ -2,11 +2,12 @@ ## Install PHDF5 manually ``` -wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.7/src/hdf5-1.10.7.tar.gz -tar -xzf hdf5-1.10.7.tar.gz +wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.12/hdf5-1.12.0/src/hdf5-1.12.0.tar.gz +tar -xzf hdf5-1.12.0.tar.gz cd hdf5-1.10.7 -./configure --enable-parallel -make +./configure --enable-parallel make install +sudo ln -v -s `pwd`/hdf5/lib/* /usr/lib64/ +sudo ln -v -s `pwd`/hdf5/include/* /usr/include/ ``` From a485736af4a690d57e8d162d0ac740ba993ba71e Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Thu, 25 Feb 2021 10:21:49 +0100 Subject: [PATCH 49/61] Reimplemented writer --- jf-live-writer/include/JFH5Writer.hpp | 58 ++-- jf-live-writer/src/JFH5Writer.cpp | 376 +++++++++++--------------- 2 files changed, 180 insertions(+), 254 deletions(-) diff --git a/jf-live-writer/include/JFH5Writer.hpp b/jf-live-writer/include/JFH5Writer.hpp index afd4a76..fb4a96a 100644 --- a/jf-live-writer/include/JFH5Writer.hpp +++ 
b/jf-live-writer/include/JFH5Writer.hpp @@ -3,48 +3,46 @@ #include #include -#include +#include +#include -#include "ImageAssembler.hpp" +extern "C" { + #include +} class JFH5Writer { + const std::string root_folder_; const std::string detector_name_; - const size_t n_modules_; - const uint64_t start_pulse_id_; - const uint64_t stop_pulse_id_; - const size_t pulse_id_step_; - const size_t n_images_; - const size_t n_total_pulses_; - size_t meta_write_index_; - size_t data_write_index_; + const uint32_t image_y_size_; + const uint32_t image_x_size_; - H5::H5File file_; - H5::DataSet image_dataset_; + static const int64_t NO_RUN_ID; + int64_t current_run_id_ = NO_RUN_ID; - uint64_t* b_pulse_id_; - uint64_t* b_frame_index_; - uint32_t* b_daq_rec_; - uint8_t* b_is_good_frame_ ; - - size_t get_n_pulses_in_range(const uint64_t start_pulse_id, - const uint64_t stop_pulse_id, - const int pulse_id_step); - - void write_metadata(); - std::string get_device_name(const std::string& device); + hid_t file_id_ = -1; + hid_t image_dataset_id_ = -1; + hid_t pulse_dataset_id_= -1; + hid_t frame_dataset_id_ = -1; + hid_t daq_rec_dataset_id_ = -1; + hid_t is_good_dataset_id_ = -1; + void open_file(const std::string& output_file, const uint32_t n_images); void close_file(); public: - JFH5Writer(const std::string& output_file, - const std::string& device, - const size_t n_modules, - const uint64_t start_pulse_id, - const uint64_t stop_pulse_id, - const size_t pulse_id_step); + JFH5Writer(const BufferUtils::DetectorConfig config); ~JFH5Writer(); - void write(const ImageMetadataBlock* metadata, const char* data); + void open_run(const int64_t run_id, const uint32_t n_images); + void close_run(); + + void write_data(const int64_t run_id, + const uint32_t index, + const char* data); + + void write_meta(const int64_t run_id, + const uint32_t index, + const ImageMetadata& meta); }; #endif //SFWRITER_HPP diff --git a/jf-live-writer/src/JFH5Writer.cpp b/jf-live-writer/src/JFH5Writer.cpp index 
784597d..535d233 100644 --- a/jf-live-writer/src/JFH5Writer.cpp +++ b/jf-live-writer/src/JFH5Writer.cpp @@ -2,213 +2,181 @@ #include #include -#include +#include -#include "writer_config.hpp" + +#include "live_writer_config.hpp" #include "buffer_config.hpp" +#include "formats.hpp" -//extern "C" -//{ -// #include "H5DOpublic.h" -// #include -//} - -using namespace std; -using namespace writer_config; -using namespace buffer_config; - -JFH5Writer::JFH5Writer(const string& output_file, - const string& device, - const size_t n_modules, - const uint64_t start_pulse_id, - const uint64_t stop_pulse_id, - const size_t pulse_id_step) : - detector_name_(get_device_name(device)), - n_modules_(n_modules), - start_pulse_id_(start_pulse_id), - stop_pulse_id_(stop_pulse_id), - pulse_id_step_(pulse_id_step), - n_images_(get_n_pulses_in_range(start_pulse_id, - stop_pulse_id, - pulse_id_step)), - n_total_pulses_(stop_pulse_id_ - start_pulse_id_ + 1), - meta_write_index_(0), - data_write_index_(0) +extern "C" { - -// bshuf_register_h5filter(); - - file_ = H5::H5File(output_file, H5F_ACC_TRUNC); - file_.createGroup("/data"); - file_.createGroup("/data/" + detector_name_); - - H5::DataSpace att_space(H5S_SCALAR); - H5::DataType data_type = H5::StrType(0, H5T_VARIABLE); - - file_.createGroup("/general"); - auto detector_dataset = file_.createDataSet( - "/general/detector_name", data_type ,att_space); - - detector_dataset.write(detector_name_, data_type); - - hsize_t image_dataset_dims[3] = - {n_images_, n_modules * MODULE_Y_SIZE, MODULE_X_SIZE}; - - H5::DataSpace image_dataspace(3, image_dataset_dims); - - hsize_t image_dataset_chunking[3] = - {1, n_modules * MODULE_Y_SIZE, MODULE_X_SIZE}; - H5::DSetCreatPropList image_dataset_properties; - image_dataset_properties.setChunk(3, image_dataset_chunking); - -// // block_size, compression type -// uint compression_prop[] = -// {MODULE_N_PIXELS, //block size -// BSHUF_H5_COMPRESS_LZ4}; // Compression type -// -// 
H5Pset_filter(image_dataset_properties.getId(), -// BSHUF_H5FILTER, -// H5Z_FLAG_MANDATORY, -// 2, -// &(compression_prop[0])); - - image_dataset_ = file_.createDataSet( - "/data/" + detector_name_ + "/data", - H5::PredType::NATIVE_UINT16, - image_dataspace, - image_dataset_properties); - - b_pulse_id_ = new uint64_t[n_total_pulses_]; - b_frame_index_= new uint64_t[n_total_pulses_]; - b_daq_rec_ = new uint32_t[n_total_pulses_]; - b_is_good_frame_ = new uint8_t[n_total_pulses_]; + #include "H5DOpublic.h" + #include } -std::string JFH5Writer::get_device_name(const std::string& device) -{ - size_t last_separator; - if ((last_separator = device.rfind("/")) == string::npos) { - return device; - } +using namespace std; +using namespace buffer_config; +using namespace live_writer_config; - return device.substr(last_separator+1); +JFH5Writer::JFH5Writer(const BufferUtils::DetectorConfig config): + root_folder_(config.buffer_folder), + detector_name_(config.detector_name), + image_x_size_(config.image_x_size), + image_y_size_(config.image_y_size) +{ } JFH5Writer::~JFH5Writer() { close_file(); - - delete[] b_pulse_id_; - delete[] b_frame_index_; - delete[] b_daq_rec_; - delete[] b_is_good_frame_; } -size_t JFH5Writer::get_n_pulses_in_range( - const uint64_t start_pulse_id, - const uint64_t stop_pulse_id, - const int pulse_id_step) +void JFH5Writer::open_run(const int64_t run_id, const uint32_t n_images) { - if (stop_pulse_id < start_pulse_id) { - throw runtime_error("stop_pulse_id smaller than start_pulse_id."); - } + close_file(); - if (100 % pulse_id_step != 0) { - throw runtime_error("100 is not divisible by the pulse_id_step."); - } + const string output_folder = root_folder_ + "/" + OUTPUT_FOLDER_SYMLINK; + // TODO: Maybe add leading zeros to filename? 
+ const string output_file = output_folder + to_string(run_id) + ".h5"; - if (start_pulse_id % pulse_id_step != 0) { - throw runtime_error("start_pulse_id not divisible by pulse_id_step."); - } + open_file(output_file, n_images); - if (stop_pulse_id % pulse_id_step != 0) { - throw runtime_error("stop_pulse_id not divisible by pulse_id_step."); - } - - size_t n_pulses = 1; - n_pulses += (stop_pulse_id / pulse_id_step); - n_pulses -= start_pulse_id / pulse_id_step; - - if (n_pulses == 0) { - throw runtime_error("Zero pulses to write in given range."); - } - - return n_pulses; + current_run_id_ = run_id; } -void JFH5Writer::write_metadata() +void JFH5Writer::close_run() { - hsize_t b_m_dims[] = {n_total_pulses_}; - hsize_t b_m_count[] = {n_images_}; - hsize_t b_m_start[] = {0}; - hsize_t b_m_stride[] = {pulse_id_step_}; - H5::DataSpace b_m_space (1, b_m_dims); - b_m_space.selectHyperslab(H5S_SELECT_SET, b_m_count, b_m_start, b_m_stride); + close_file(); + current_run_id_ = NO_RUN_ID; +} - hsize_t f_m_dims[] = {n_images_, 1}; - H5::DataSpace f_m_space(2, f_m_dims); +void JFH5Writer::open_file(const string& output_file, const uint32_t n_images) +{ + // Create file + auto fcpl_id = H5Pcreate(H5P_FILE_ACCESS); + if (fcpl_id == -1) { + throw runtime_error("Error in file access property list."); + } - auto pulse_id_dataset = file_.createDataSet( - "/data/" + detector_name_ + "/pulse_id", - H5::PredType::NATIVE_UINT64, f_m_space); - pulse_id_dataset.write( - b_pulse_id_, H5::PredType::NATIVE_UINT64, b_m_space, f_m_space); - pulse_id_dataset.close(); + if (H5Pset_fapl_mpio(fcpl_id, MPI_COMM_WORLD, MPI_INFO_NULL) < 0) { + throw runtime_error("Cannot set mpio to property list."); + } - auto frame_index_dataset = file_.createDataSet( - "/data/" + detector_name_ + "/frame_index", - H5::PredType::NATIVE_UINT64, f_m_space); - frame_index_dataset.write( - b_frame_index_, H5::PredType::NATIVE_UINT64, b_m_space, f_m_space); - frame_index_dataset.close(); + file_id_ = H5Fcreate( + 
output_file.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, fcpl_id); + if (file_id_ < 0) { + throw runtime_error("Cannot create output file."); + } - auto daq_rec_dataset = file_.createDataSet( - "/data/" + detector_name_ + "/daq_rec", - H5::PredType::NATIVE_UINT32, f_m_space); - daq_rec_dataset.write( - b_daq_rec_, H5::PredType::NATIVE_UINT32, b_m_space, f_m_space); - daq_rec_dataset.close(); + H5Pclose(fcpl_id); - auto is_good_frame_dataset = file_.createDataSet( - "/data/" + detector_name_ + "/is_good_frame", - H5::PredType::NATIVE_UINT8, f_m_space); - is_good_frame_dataset.write( - b_is_good_frame_, H5::PredType::NATIVE_UINT8, b_m_space, f_m_space); - is_good_frame_dataset.close(); + // Create group + auto data_group_id = H5Gcreate(file_id_, detector_name_.c_str(), + H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + if (data_group_id < 0) { + throw runtime_error("Cannot create data group."); + } + + // Create image dataset. + auto dcpl_id = H5Pcreate(H5P_DATASET_CREATE); + if (dcpl_id < 0) { + throw runtime_error("Error in creating dataset create property list."); + } + + hsize_t image_dataset_chunking[] = {1, image_y_size_, image_x_size_}; + if (H5Pset_chunk(dcpl_id, 3, image_dataset_chunking) < 0) { + throw runtime_error("Cannot set image dataset chunking."); + } + + if (H5Pset_fill_time(dcpl_id, H5D_FILL_TIME_NEVER) < 0) { + throw runtime_error("Cannot set image dataset fill time."); + } + + if (H5Pset_alloc_time(dcpl_id, H5D_ALLOC_TIME_EARLY) < 0) { + throw runtime_error("Cannot set image dataset allocation time."); + } + + hsize_t image_dataset_dims[] = {n_images, image_y_size_, image_x_size_}; + auto image_space_id = H5Screate_simple(3, image_dataset_dims, NULL); + if (image_space_id < 0) { + throw runtime_error("Cannot create image dataset space."); + } + + // TODO: Enable compression. 
+// bshuf_register_h5filter(); +// uint filter_prop[] = {PIXEL_N_BYTES, BSHUF_H5_COMPRESS_LZ4}; +// if (H5Pset_filter(dcpl_id, BSHUF_H5FILTER, H5Z_FLAG_MANDATORY, +// 2, filter_prop) < 0) { +// throw runtime_error("Cannot set compression filter on dataset."); +// } + + image_dataset_id_ = H5Dcreate( + data_group_id, "data", H5T_NATIVE_INT, image_space_id, + H5P_DEFAULT, dcpl_id, H5P_DEFAULT); + if (image_dataset_id_ < 0) { + throw runtime_error("Cannot create image dataset."); + } + + // Create metadata datasets. + hsize_t meta_dataset_dims[] = {n_images}; + auto meta_space_id = H5Screate_simple(1, meta_dataset_dims, NULL); + if (meta_space_id < 0) { + throw runtime_error("Cannot create meta dataset space."); + } + + auto create_meta_dataset = [&](string name, hid_t data_type) { + auto dataset_id = H5Dcreate( + data_group_id, name.c_str(), data_type, meta_space_id, + H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + if (dataset_id < 0) { + throw runtime_error("Cannot create " + name + " dataset."); + } + + return dataset_id; + }; + + pulse_dataset_id_ = create_meta_dataset("pulse_id", H5T_NATIVE_UINT64); + frame_dataset_id_ = create_meta_dataset("frame_index", H5T_NATIVE_UINT64); + daq_rec_dataset_id_ = create_meta_dataset("daq_rec", H5T_NATIVE_UINT32); + is_good_dataset_id_ = create_meta_dataset("is_good_frame", H5T_NATIVE_UINT8); + + H5Sclose(meta_space_id); + H5Sclose(image_space_id); + H5Pclose(dcpl_id); + H5Gclose(data_group_id); } void JFH5Writer::close_file() { - if (file_.getId() == -1) { + if (file_id_ < 0) { return; } - image_dataset_.close(); + H5Dclose(image_dataset_id_); + image_dataset_id_ = -1; - write_metadata(); + H5Dclose(pulse_dataset_id_); + pulse_dataset_id_ = -1; - file_.close(); + H5Dclose(frame_dataset_id_); + frame_dataset_id_ = -1; + + H5Dclose(daq_rec_dataset_id_); + daq_rec_dataset_id_ = -1; + + H5Dclose(is_good_dataset_id_); + is_good_dataset_id_ = -1; + + H5Fclose(file_id_); + file_id_ = -1; } -void JFH5Writer::write( - const 
ImageMetadataBlock* metadata, const char* data) +void JFH5Writer::write_data( + const int64_t run_id, const uint32_t index, const char* data) { - size_t n_images_offset = 0; - if (start_pulse_id_ > metadata->block_start_pulse_id) { - n_images_offset = start_pulse_id_ - metadata->block_start_pulse_id; - } - - if (n_images_offset > BUFFER_BLOCK_SIZE) { - throw runtime_error("Received unexpected block for start_pulse_id."); - } - - size_t n_images_to_copy = BUFFER_BLOCK_SIZE - n_images_offset; - if (stop_pulse_id_ < metadata->block_stop_pulse_id) { - n_images_to_copy -= metadata->block_stop_pulse_id - stop_pulse_id_; - } - - if (n_images_to_copy < 1) { - throw runtime_error("Received unexpected block for stop_pulse_id."); + if (run_id != current_run_id_) { + throw runtime_error("Invalid run_id."); } // hsize_t b_i_dims[3] = {BUFFER_BLOCK_SIZE, @@ -234,60 +202,20 @@ void JFH5Writer::write( // image_dataset_.write( // data, H5::PredType::NATIVE_UINT16, b_i_space, f_i_space); - // TODO: Can the i_image++ be made more efficient? 
- for (size_t i_image=n_images_offset; - i_image < n_images_offset + n_images_to_copy; - i_image++) { + hsize_t offset[] = {data_write_index_, 0, 0}; + size_t data_offset = i_image * MODULE_N_BYTES * n_modules_; + + H5DOwrite_chunk( + image_dataset_.getId(), + H5P_DEFAULT, + 0, + offset, + MODULE_N_BYTES * n_modules_, + data + data_offset); +} + +void JFH5Writer::write_meta( + const int64_t run_id, const uint32_t index, const ImageMetadata& meta) +{ - if (i_image % pulse_id_step_ != 0) { - continue; - } - - hsize_t offset[] = {data_write_index_, 0, 0}; - size_t data_offset = i_image * MODULE_N_BYTES * n_modules_; - - H5DOwrite_chunk( - image_dataset_.getId(), - H5P_DEFAULT, - 0, - offset, - MODULE_N_BYTES * n_modules_, - data + data_offset); - - data_write_index_++; - } - - // pulse_id - { - auto b_current_ptr = b_pulse_id_ + meta_write_index_; - memcpy(b_current_ptr, - &(metadata->pulse_id[n_images_offset]), - sizeof(uint64_t) * n_images_to_copy); - } - - // frame_index - { - auto b_current_ptr = b_frame_index_ + meta_write_index_; - memcpy(b_current_ptr, - &(metadata->frame_index[n_images_offset]), - sizeof(uint64_t) * n_images_to_copy); - } - - // daq_rec - { - auto b_current_ptr = b_daq_rec_ + meta_write_index_; - memcpy(b_current_ptr, - &(metadata->daq_rec[n_images_offset]), - sizeof(uint32_t) * n_images_to_copy); - } - - // is_good_frame - { - auto b_current_ptr = b_is_good_frame_ + meta_write_index_; - memcpy(b_current_ptr, - &(metadata->is_good_image[n_images_offset]), - sizeof(uint8_t) * n_images_to_copy); - } - - meta_write_index_ += n_images_to_copy; } From 6ad5b9538dd9f544bdbe9d06f69113cffa3bb0ed Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Thu, 25 Feb 2021 10:22:21 +0100 Subject: [PATCH 50/61] Remove unused OP code --- jf-live-writer/include/broker_format.hpp | 1 - 1 file changed, 1 deletion(-) diff --git a/jf-live-writer/include/broker_format.hpp b/jf-live-writer/include/broker_format.hpp index 9943d9c..3e8d3cd 100644 --- 
a/jf-live-writer/include/broker_format.hpp +++ b/jf-live-writer/include/broker_format.hpp @@ -3,7 +3,6 @@ #include "formats.hpp" -const static uint8_t OP_CONTINUE = 0; const static uint8_t OP_START = 1; const static uint8_t OP_END = 2; From 9a3df073311e02aef49cb66adf50e194a38fbc35 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Thu, 25 Feb 2021 10:26:59 +0100 Subject: [PATCH 51/61] Adjust main in live writer for metadata writes --- jf-live-writer/src/main.cpp | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/jf-live-writer/src/main.cpp b/jf-live-writer/src/main.cpp index 417c919..25cdfa9 100644 --- a/jf-live-writer/src/main.cpp +++ b/jf-live-writer/src/main.cpp @@ -61,21 +61,23 @@ int main (int argc, char *argv[]) } if (meta.op_code == OP_END) { - writer.close_run(meta.run_id); + writer.close_run(); continue; } - if (meta.i_image % n_writers != i_writer) { - continue; + // Fair distribution of images among writers. + if (meta.i_image % n_writers == i_writer) { + char* data = ram_buffer.read_image(meta.image_metadata.pulse_id); + + stats.start_image_write(); + writer.write_data(meta.run_id, meta.i_image, data); + stats.end_image_write(); } - char* data = ram_buffer.read_image(meta.image_metadata.pulse_id); - - stats.start_image_write(); - - writer.write(meta.run_id, meta.image_metadata, data); - - stats.end_image_write(); + // Only the first instance writes metadata. 
+ if (i_writer == 0) { + writer.write_meta(meta.run_id, meta.i_image, meta.image_metadata); + } } MPI_Finalize(); From 592741048a6dd50b69f827707589a9ff38c8b941 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Thu, 25 Feb 2021 10:47:46 +0100 Subject: [PATCH 52/61] Add image details to store stream format --- jf-live-writer/include/broker_format.hpp | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/jf-live-writer/include/broker_format.hpp b/jf-live-writer/include/broker_format.hpp index 3e8d3cd..ecdca44 100644 --- a/jf-live-writer/include/broker_format.hpp +++ b/jf-live-writer/include/broker_format.hpp @@ -9,10 +9,13 @@ const static uint8_t OP_END = 2; #pragma pack(push) #pragma pack(1) struct StoreStream { - uint8_t op_code; + int64_t run_id; uint32_t i_image; uint32_t n_images; - int64_t run_id; + uint32_t image_y_size; + uint32_t image_x_size; + uint32_t op_code; + uint32_t bits_per_pixel; ImageMetadata image_metadata; }; From 676e8215d9ec4717130cf4e2f675df1f518a4a0f Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Thu, 25 Feb 2021 10:49:59 +0100 Subject: [PATCH 53/61] Moving image size initialization into each run --- jf-live-writer/include/JFH5Writer.hpp | 17 +++++++++++++---- jf-live-writer/src/JFH5Writer.cpp | 9 +++++++-- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/jf-live-writer/include/JFH5Writer.hpp b/jf-live-writer/include/JFH5Writer.hpp index fb4a96a..18becef 100644 --- a/jf-live-writer/include/JFH5Writer.hpp +++ b/jf-live-writer/include/JFH5Writer.hpp @@ -14,12 +14,15 @@ class JFH5Writer { const std::string root_folder_; const std::string detector_name_; - const uint32_t image_y_size_; - const uint32_t image_x_size_; static const int64_t NO_RUN_ID; - int64_t current_run_id_ = NO_RUN_ID; + // Run specific variables. + int64_t current_run_id_ = NO_RUN_ID; + uint32_t image_y_size_ = 0; + uint32_t image_x_size_ = 0; + + // Open file specific variables. 
hid_t file_id_ = -1; hid_t image_dataset_id_ = -1; hid_t pulse_dataset_id_= -1; @@ -27,13 +30,19 @@ class JFH5Writer { hid_t daq_rec_dataset_id_ = -1; hid_t is_good_dataset_id_ = -1; + hid_t get_datatype(const int bits_per_pixel); void open_file(const std::string& output_file, const uint32_t n_images); void close_file(); public: JFH5Writer(const BufferUtils::DetectorConfig config); ~JFH5Writer(); - void open_run(const int64_t run_id, const uint32_t n_images); + + void open_run(const int64_t run_id, + const uint32_t n_images, + const uint32_t image_y_size, + const uint32_t image_x_size, + const uint32_t bits_per_pixel); void close_run(); void write_data(const int64_t run_id, diff --git a/jf-live-writer/src/JFH5Writer.cpp b/jf-live-writer/src/JFH5Writer.cpp index 535d233..6e6c03e 100644 --- a/jf-live-writer/src/JFH5Writer.cpp +++ b/jf-live-writer/src/JFH5Writer.cpp @@ -22,8 +22,6 @@ using namespace live_writer_config; JFH5Writer::JFH5Writer(const BufferUtils::DetectorConfig config): root_folder_(config.buffer_folder), detector_name_(config.detector_name), - image_x_size_(config.image_x_size), - image_y_size_(config.image_y_size) { } @@ -32,6 +30,8 @@ JFH5Writer::~JFH5Writer() close_file(); } + + void JFH5Writer::open_run(const int64_t run_id, const uint32_t n_images) { close_file(); @@ -53,6 +53,7 @@ void JFH5Writer::close_run() void JFH5Writer::open_file(const string& output_file, const uint32_t n_images) { + // Create file auto fcpl_id = H5Pcreate(H5P_FILE_ACCESS); if (fcpl_id == -1) { @@ -217,5 +218,9 @@ void JFH5Writer::write_data( void JFH5Writer::write_meta( const int64_t run_id, const uint32_t index, const ImageMetadata& meta) { + if (run_id != current_run_id_) { + throw runtime_error("Invalid run_id."); + } + } From 2324ebde4f8c2bc5ea37491e50796bd0a854eadd Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Thu, 25 Feb 2021 10:51:04 +0100 Subject: [PATCH 54/61] Adjust main to new signature --- jf-live-writer/src/main.cpp | 6 +++++- 1 file changed, 5 
insertions(+), 1 deletion(-) diff --git a/jf-live-writer/src/main.cpp b/jf-live-writer/src/main.cpp index 25cdfa9..4d77d29 100644 --- a/jf-live-writer/src/main.cpp +++ b/jf-live-writer/src/main.cpp @@ -56,7 +56,11 @@ int main (int argc, char *argv[]) zmq_recv(receiver, &meta, sizeof(meta), 0); if (meta.op_code == OP_START) { - writer.open_run(meta.run_id, meta.n_images); + writer.open_run(meta.run_id, + meta.n_images, + meta.image_y_size, + meta.image_x_size, + meta.bits_per_pixel); continue; } From 5e603115b9e14f9c5526fe1aa43a24fc521fdeed Mon Sep 17 00:00:00 2001 From: Dmitry Ozerov Date: Thu, 25 Feb 2021 11:46:32 +0100 Subject: [PATCH 55/61] implement disable modules possibility in detector retrieve process --- scripts/export_file.py | 75 ++++++++++++++++++++++++++++-------------- 1 file changed, 50 insertions(+), 25 deletions(-) diff --git a/scripts/export_file.py b/scripts/export_file.py index 4aa0388..52d6224 100644 --- a/scripts/export_file.py +++ b/scripts/export_file.py @@ -7,6 +7,12 @@ import numpy as np import jungfrau_utils as ju +import sys +sys.path.append('/home/dbe/git/sf_daq_buffer/scripts') +import postprocess_raw + +import os + parser = argparse.ArgumentParser() parser.add_argument("file_in", type=str) @@ -27,8 +33,9 @@ with open(args.json_run, "r") as run_file: data = json.load(run_file) detector_params = data["detectors"][detector_name] - compression = detector_params.get("compression", True) - conversion = detector_params.get("adc_to_energy", True) + compression = detector_params.get("compression", False) + conversion = detector_params.get("adc_to_energy", False) + disabled_modules = detector_params.get("disabled_modules", []) if conversion: mask = detector_params.get("mask", True) mask_double_pixels = detector_params.get("mask_double_pixels", True) @@ -46,30 +53,48 @@ if not mask and mask_double_pixels: warnings.warn("mask_double_pixels set to False") mask_double_pixels = False -with ju.File( - args.file_in, - gain_file=gain_file, - 
pedestal_file=pedestal_file, - conversion=conversion, - mask=mask, - gap_pixels=gap_pixels, - geometry=geometry, - parallel=False, -) as juf: - n_input_frames = len(juf["data"]) - good_frames = np.nonzero(juf["is_good_frame"])[0] - n_output_frames = len(good_frames) +file_tmp = args.file_in +if len(disabled_modules)>0: + print(f"Will reduce data file, disabled_modules: {disabled_modules}") + if conversion: + file_tmp = args.file_out+".tmp" + else: + file_tmp = args.file_out + postprocess_raw.postprocess_raw(args.file_in, file_tmp, compression=compression, disabled_modules=disabled_modules) - juf.handler.mask_double_pixels = mask_double_pixels - juf.export( - args.file_out, - index=good_frames, - roi=None, - compression=compression, - factor=factor, - dtype=None, - batch_size=35, - ) +if conversion: + + with ju.File( + file_tmp, + gain_file=gain_file, + pedestal_file=pedestal_file, + conversion=conversion, + mask=mask, + gap_pixels=gap_pixels, + geometry=geometry, + parallel=False, + ) as juf: + n_input_frames = len(juf["data"]) + good_frames = np.nonzero(juf["is_good_frame"])[0] + n_output_frames = len(good_frames) + + juf.handler.mask_double_pixels = mask_double_pixels + juf.export( + args.file_out, + index=good_frames, + roi=None, + compression=compression, + factor=factor, + dtype=None, + batch_size=35, + ) + os.remove(file_tmp) + +else: + with h5py.File(file_tmp, "r") as juf: + n_input_frames = len(juf[f"data/{detector_name}/data"]) + good_frames = np.nonzero(juf[f"data/{detector_name}/is_good_frame"])[0] + n_output_frames = len(good_frames) # Utility info with h5py.File(args.file_out, "r") as h5f: From 8583597a51471d17eb6469d60e208f8efb77b065 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Fri, 26 Feb 2021 12:03:20 +0100 Subject: [PATCH 56/61] Bits per pixel added to live writer --- jf-live-writer/include/JFH5Writer.hpp | 1 + jf-live-writer/src/JFH5Writer.cpp | 26 +++++++++++++++++--------- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git 
a/jf-live-writer/include/JFH5Writer.hpp b/jf-live-writer/include/JFH5Writer.hpp index 18becef..3a681f2 100644 --- a/jf-live-writer/include/JFH5Writer.hpp +++ b/jf-live-writer/include/JFH5Writer.hpp @@ -21,6 +21,7 @@ class JFH5Writer { int64_t current_run_id_ = NO_RUN_ID; uint32_t image_y_size_ = 0; uint32_t image_x_size_ = 0; + uint32_t bits_per_pixel_ = 0; // Open file specific variables. hid_t file_id_ = -1; diff --git a/jf-live-writer/src/JFH5Writer.cpp b/jf-live-writer/src/JFH5Writer.cpp index 6e6c03e..a9d2a52 100644 --- a/jf-live-writer/src/JFH5Writer.cpp +++ b/jf-live-writer/src/JFH5Writer.cpp @@ -30,30 +30,38 @@ JFH5Writer::~JFH5Writer() close_file(); } - - -void JFH5Writer::open_run(const int64_t run_id, const uint32_t n_images) +void JFH5Writer::open_run(const int64_t run_id, + const uint32_t n_images, + const uint32_t image_y_size, + const uint32_t image_x_size, + const uint32_t bits_per_pixel) { - close_file(); + close_run(); const string output_folder = root_folder_ + "/" + OUTPUT_FOLDER_SYMLINK; // TODO: Maybe add leading zeros to filename? 
const string output_file = output_folder + to_string(run_id) + ".h5"; - open_file(output_file, n_images); - current_run_id_ = run_id; + image_y_size_ = image_y_size; + image_x_size_ = image_x_size; + bits_per_pixel_ = bits_per_pixel; + + open_file(output_file, n_images); } void JFH5Writer::close_run() { close_file(); + current_run_id_ = NO_RUN_ID; + image_y_size_ = 0; + image_x_size_ = 0; + bits_per_pixel_ = 0; } void JFH5Writer::open_file(const string& output_file, const uint32_t n_images) { - // Create file auto fcpl_id = H5Pcreate(H5P_FILE_ACCESS); if (fcpl_id == -1) { @@ -113,8 +121,8 @@ void JFH5Writer::open_file(const string& output_file, const uint32_t n_images) // } image_dataset_id_ = H5Dcreate( - data_group_id, "data", H5T_NATIVE_INT, image_space_id, - H5P_DEFAULT, dcpl_id, H5P_DEFAULT); + data_group_id, "data", get_datatype(bits_per_pixel_), + image_space_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT); if (image_dataset_id_ < 0) { throw runtime_error("Cannot create image dataset."); } From 00696c572b403b6abf27f93c63841d30f78d61f0 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Fri, 26 Feb 2021 12:03:48 +0100 Subject: [PATCH 57/61] Remove live writer from build until finished --- CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index ff244cf..f392ac3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -33,4 +33,4 @@ add_subdirectory("jf-udp-recv") add_subdirectory("jf-buffer-writer") add_subdirectory("jf-assembler") add_subdirectory("sf-stream") -add_subdirectory("jf-live-writer") +#add_subdirectory("jf-live-writer") From 32d0773a551d63afe296f8b8f62a6fa487e60397 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Fri, 26 Feb 2021 12:37:05 +0100 Subject: [PATCH 58/61] Fix reference to changed variable in ImageAssembler --- sf-writer/src/ImageAssembler.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sf-writer/src/ImageAssembler.cpp b/sf-writer/src/ImageAssembler.cpp index 
ac1b851..1a9a5e6 100644 --- a/sf-writer/src/ImageAssembler.cpp +++ b/sf-writer/src/ImageAssembler.cpp @@ -86,7 +86,7 @@ void ImageAssembler::process( memcpy( &(frame_meta_buffer_[meta_offset]), - &(frame.metadata), + &(frame.meta), sizeof(ModuleFrame)); meta_offset += meta_offset_step; From 0a79167c481ba599cf1c1ac1ec74fed18d12f5d6 Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Fri, 26 Feb 2021 12:38:37 +0100 Subject: [PATCH 59/61] Add zmq library to sf-writer links --- sf-writer/CMakeLists.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/sf-writer/CMakeLists.txt b/sf-writer/CMakeLists.txt index dc87ad2..5b520f5 100644 --- a/sf-writer/CMakeLists.txt +++ b/sf-writer/CMakeLists.txt @@ -11,6 +11,7 @@ add_executable(sf-writer src/main.cpp) set_target_properties(sf-writer PROPERTIES OUTPUT_NAME sf_writer) target_link_libraries(sf-writer sf-writer-lib + zmq hdf5 hdf5_hl hdf5_cpp From ee8a1fbdae3da11437afc62da268d1d06886ce0b Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Fri, 26 Feb 2021 14:23:35 +0100 Subject: [PATCH 60/61] Removed unused image size from config --- core-buffer/include/BufferUtils.hpp | 2 -- core-buffer/src/BufferUtils.cpp | 2 -- 2 files changed, 4 deletions(-) diff --git a/core-buffer/include/BufferUtils.hpp b/core-buffer/include/BufferUtils.hpp index a1219ff..1403bd0 100644 --- a/core-buffer/include/BufferUtils.hpp +++ b/core-buffer/include/BufferUtils.hpp @@ -19,8 +19,6 @@ namespace BufferUtils const int n_modules; const int start_udp_port; const std::string buffer_folder; - const int image_y_size; - const int image_x_size; }; diff --git a/core-buffer/src/BufferUtils.cpp b/core-buffer/src/BufferUtils.cpp index be85b4b..b6b1bb5 100644 --- a/core-buffer/src/BufferUtils.cpp +++ b/core-buffer/src/BufferUtils.cpp @@ -164,7 +164,5 @@ BufferUtils::DetectorConfig BufferUtils::read_json_config( config_parameters["n_modules"].GetInt(), config_parameters["start_udp_port"].GetInt(), config_parameters["buffer_folder"].GetString(), - 
config_parameters["image_y_size"].GetInt(), - config_parameters["image_x_size"].GetInt() }; } From a461255241cd48ce933956b53c7c8bd96562f73b Mon Sep 17 00:00:00 2001 From: Andrej Babic Date: Fri, 26 Feb 2021 14:26:19 +0100 Subject: [PATCH 61/61] Fix test of image assembler --- sf-writer/test/test_ImageAssembler.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sf-writer/test/test_ImageAssembler.cpp b/sf-writer/test/test_ImageAssembler.cpp index 1482442..97bdbb3 100644 --- a/sf-writer/test/test_ImageAssembler.cpp +++ b/sf-writer/test/test_ImageAssembler.cpp @@ -50,7 +50,7 @@ TEST(ImageAssembler, reconstruction) for (size_t i_module=0; i_module < n_modules; i_module++) { for (size_t i_pulse=0; i_pulse < BUFFER_BLOCK_SIZE; i_pulse++) { - auto& frame_meta = buffer_block->frame[i_pulse].metadata; + auto& frame_meta = buffer_block->frame[i_pulse].meta; frame_meta.pulse_id = 100 + i_pulse; frame_meta.daq_rec = 1000 + i_pulse;