mirror of
https://github.com/paulscherrerinstitute/sf_daq_buffer.git
synced 2026-05-01 23:02:21 +02:00
version used in July Alvra experiment (conversion and compression of files using Ivan's export_file.py code based on jungfrau_utils); make list of frames in crystfel notation to separate laser light/dark
This commit is contained in:
committed by
Data Backend account
parent
44a60a8d27
commit
df51bb6ca5
@@ -0,0 +1,98 @@
|
||||
import argparse
|
||||
import json
|
||||
import warnings
|
||||
|
||||
import h5py
|
||||
import numpy as np
|
||||
|
||||
import jungfrau_utils as ju
|
||||
|
||||
# Convert/compress a raw JUNGFRAU file using jungfrau_utils.
#
# Reads detector settings (name, gain/pedestal files) from a detector JSON
# file and per-run options from a run JSON file, exports only the good frames
# into the output file, then removes per-frame bookkeeping datasets that are
# no longer meaningful after conversion.
#
# NOTE(review): the original rendering of this script lost all indentation;
# the block structure below is reconstructed from the control-flow keywords —
# confirm the nesting of the post-processing section against the repository.

parser = argparse.ArgumentParser()

parser.add_argument("file_in", type=str)
parser.add_argument("file_out", type=str)
parser.add_argument("json_run", type=str)
parser.add_argument("json_detector", type=str)

args = parser.parse_args()

# Detector configuration: which detector, and its calibration files.
with open(args.json_detector, "r") as detector_file:
    data = json.load(detector_file)

detector_name = data["detector_name"]
gain_file = data["gain_file"]
pedestal_file = data["pedestal_file"]

# Per-run parameters for this particular detector.
with open(args.json_run, "r") as run_file:
    data = json.load(run_file)
detector_params = data["detectors"][detector_name]

compression = detector_params.get("compression", True)
conversion = detector_params.get("adc_to_energy", True)
if conversion:
    mask = detector_params.get("mask", True)
    mask_double_pixels = detector_params.get("mask_double_pixels", True)
    geometry = detector_params.get("geometry", False)
    gap_pixels = detector_params.get("gap_pixels", True)
    factor = detector_params.get("factor", None)
else:
    # Without ADC->energy conversion none of the derived options apply.
    mask = False
    mask_double_pixels = False
    geometry = False
    gap_pixels = False
    factor = None

# Double-pixel masking requires the pixel mask itself to be applied.
if not mask and mask_double_pixels:
    warnings.warn("mask_double_pixels set to False")
    mask_double_pixels = False

# Dividing by a factor only makes sense with an integer output dtype.
if factor:
    dtype = np.int32
else:
    dtype = None

with ju.File(
    args.file_in,
    gain_file=gain_file,
    pedestal_file=pedestal_file,
    conversion=conversion,
    mask=mask,
    gap_pixels=gap_pixels,
    geometry=geometry,
    parallel=False,
) as juf:
    n_input_frames = len(juf["data"])
    # Indices of frames flagged good by the DAQ; only these are exported.
    good_frames = np.nonzero(juf["is_good_frame"])[0]
    n_output_frames = len(good_frames)

    juf.handler.mask_double_pixels = mask_double_pixels
    juf.export(args.file_out, index=good_frames, roi=None, compression=compression, factor=factor, dtype=dtype, batch_size=500)
    pixel_mask = juf.handler.get_pixel_mask(gap_pixels=gap_pixels, geometry=geometry)

# Postprocessing
with h5py.File(args.file_out, "r+") as h5f:
    # Downstream tools expect an inverted mask (good pixels marked opposite).
    h5f[f"/data/{detector_name}/pixel_mask"] = np.invert(pixel_mask)
    if conversion:
        # After conversion the raw per-frame bookkeeping is no longer valid;
        # print a summary and drop the datasets.
        # NOTE(review): placement of the three deletions under `if conversion:`
        # is an indentation reconstruction — TODO confirm.
        print("daq_rec:", h5f[f"/data/{detector_name}/daq_rec"][0, 0])
        del h5f[f"/data/{detector_name}/daq_rec"]

        frame_index = h5f[f"/data/{detector_name}/frame_index"][:]
        print("frame_index range:", (np.min(frame_index), np.max(frame_index)))
        del h5f[f"/data/{detector_name}/frame_index"]

        del h5f[f"/data/{detector_name}/is_good_frame"]

print("input frames:", n_input_frames)
print("bad frames:", n_input_frames - n_output_frames)
print("output frames:", n_output_frames)

print("gain_file:", gain_file)
print("pedestal_file:", pedestal_file)
print("conversion:", conversion)
print("mask:", mask)
print("mask_double_pixels:", mask_double_pixels)
print("geometry:", geometry)
print("gap_pixels:", gap_pixels)
print("compression:", compression)
print("factor:", factor)
||||
@@ -0,0 +1,114 @@
|
||||
import argparse
|
||||
|
||||
import numpy as np
|
||||
import os
|
||||
|
||||
import h5py
|
||||
import json
|
||||
|
||||
def is_it_dark(laser_mode, detector_rate, pulseid):
    """Return True when the frame with this pulseid was laser-dark.

    laser_mode encodes the laser light/dark sequence:
        0   -> laser off, every frame dark
        1   -> laser always on, every frame light
        11  -> 50/50 alternation
        41  -> 4 lights, 1 dark
        111 -> 11 lights, 1 dark
        191 -> 19 lights, 1 dark
        any other value (e.g. -1) falls through to "dark" for every frame
    detector_rate is the detector frequency in Hz (the machine runs at
    100 Hz, so consecutive recorded frames are 100/detector_rate pulse ids
    apart when the rate divides 100).

    Uses exact integer arithmetic (100 % detector_rate, //) instead of the
    original float-equality check and float modulo; results are identical
    for integer inputs but immune to float rounding.
    """
    # Trivial modes: laser permanently off / permanently on.
    if laser_mode == 0:
        return True
    if laser_mode == 1:
        return False

    # Pulse-id spacing between dark frames; start from the spacing between
    # recorded frames (only when the detector rate divides 100 Hz).
    dark_rate = 1
    if 100 % detector_rate == 0:
        dark_rate = 100 // detector_rate

    if laser_mode == 11:  # 50/50 mode
        dark_rate *= 2
    elif laser_mode == 41:  # 4 lights, 1 dark sequence
        dark_rate *= 5
    elif laser_mode == 111:  # 11 lights, 1 dark sequence
        dark_rate *= 12
    elif laser_mode == 191:  # 19 lights, 1 dark sequence
        dark_rate *= 20

    # A frame is dark when its (phase-shifted) pulse id lands on the dark slot.
    return (pulseid + 100 // detector_rate) % dark_rate == 0
|
||||
|
||||
|
||||
# Build CrystFEL-notation frame lists (dark/light) from a converted data file,
# classifying each good frame with is_it_dark() according to the run's laser
# mode and detector rate.
#
# NOTE(review): indentation of this script was lost in extraction and is
# reconstructed below; structure is unambiguous but worth a glance.

parser = argparse.ArgumentParser()
parser.add_argument("data_file", type=str)
parser.add_argument("run_info", type=str)
args = parser.parse_args()

data_file = args.data_file
run_info_file = args.run_info

try:
    with open(run_info_file) as json_file:
        parameters = json.load(json_file)
except Exception:
    # Bug fix: the original print was missing the f prefix, so the file name
    # was printed literally as "{run_info_file}".
    print(f"Can't read provided run file {run_info_file}, may be not json?")
    exit()

laser_mode = parameters.get("laser_mode", 0)
rate_multiplicator = parameters.get("rate_multiplicator", 1)
detector_rate = 100//rate_multiplicator

print("Laser mode: ", laser_mode, ", detector runs at ", detector_rate, "Hz")

try:
    f = h5py.File(data_file, "r")
except Exception:
    print(f"Can't open {data_file}")
    exit()

# NOTE(review): detector name is hard-coded for this experiment — confirm.
detector = 'JF06T08V01'

pulseids = f[f'/data/{detector}/pulse_id'][:]
n_pulse_id = len(pulseids)
if f'/data/{detector}/is_good_frame' in f.keys():
    is_good_frame = f[f'/data/{detector}/is_good_frame'][:]
else:
    # Files without the flag are treated as all-good.
    is_good_frame = [1] * n_pulse_id

nGoodFrames = 0
nProcessedFrames = 0

index_dark = []
index_light = []
for i in range(len(pulseids)):
    if not is_good_frame[i]:
        continue
    nGoodFrames += 1
    p = pulseids[i]
    nProcessedFrames += 1
    if is_it_dark(laser_mode, detector_rate, p):
        index_dark.append(i)
    else:
        index_light.append(i)

f.close()

print("Total number of frames: %s, number of good frames : %s, processed frames: %s, outputed frames: %s(dark) %s(light) " % (len(pulseids), nGoodFrames, nProcessedFrames, len(index_dark), len(index_light)) )

# CrystFEL event delimiter between file name and frame number.
delim = '//'

if len(index_dark) > 0:
    file_dark = data_file[:-3] + ".dark.lst"
    # Unknown laser mode: classification is meaningless, label accordingly.
    if laser_mode == -1:
        file_dark = data_file[:-3] + ".undefined.lst"
    print(f"List of dark frames : {file_dark} , {len(index_dark)} frames")
    with open(file_dark, "w") as f_list:
        for frame_number in index_dark:
            print(f'{data_file} //{frame_number}', file = f_list)

if len(index_light) > 0:
    file_light = data_file[:-3] + ".light.lst"
    print(f"List of light frames : {file_light} , {len(index_light)} frames")
    with open(file_light, "w") as f_list:
        for frame_number in index_light:
            print(f'{data_file} {delim}{frame_number}', file = f_list)
||||
|
||||
@@ -4,7 +4,7 @@ if [ $# -lt 3 ]
|
||||
then
|
||||
echo "Usage : $0 detector_name start_pulse_id end_pulse_id "
|
||||
echo "Example : $0 JF07T32V01 11709404000 11709405000 "
|
||||
echo "Optional parameters: output_file_name rate_multiplicator"
|
||||
echo "Optional parameters: output_file_name rate_multiplicator jf_conversion run_file raw_file"
|
||||
exit
|
||||
fi
|
||||
|
||||
@@ -12,6 +12,9 @@ DETECTOR=$1
|
||||
START_PULSE_ID=$2
|
||||
STOP_PULSE_ID=$3
|
||||
PULSE_ID_STEP=1 # by default assume 100Hz
|
||||
JF_CONVERSION=0 # by default don't call ju_export
|
||||
RUN_FILE=None
|
||||
RAW_FILE=None
|
||||
|
||||
echo "Request to retrieve : $@ "
|
||||
echo "Started : "`date`
|
||||
@@ -24,20 +27,49 @@ else
|
||||
OUTFILE=/gpfs/photonics/swissfel/buffer/test.${START_PULSE_ID}-${STOP_PULSE_ID}.h5
|
||||
fi
|
||||
|
||||
if [ $# -eq 5 ]
|
||||
if [ $# -ge 5 ]
|
||||
then
|
||||
PULSE_ID_STEP=$5
|
||||
fi
|
||||
|
||||
if [ $# -ge 6 ]
|
||||
then
|
||||
JF_CONVERSION=$6
|
||||
if [ $# -ge 7 ]
|
||||
then
|
||||
RUN_FILE=$7
|
||||
fi
|
||||
if [ $# -eq 8 ]
|
||||
then
|
||||
RAW_FILE=$8
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
case ${DETECTOR} in
|
||||
'JF01T03V01')
|
||||
NM=3
|
||||
DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF01.json
|
||||
;;
|
||||
'JF02T09V02')
|
||||
NM=9
|
||||
DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF02.json
|
||||
;;
|
||||
'JF06T32V01')
|
||||
NM=32
|
||||
DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF06.json
|
||||
;;
|
||||
'JF06T08V01')
|
||||
NM=8
|
||||
DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF06_4M.json
|
||||
;;
|
||||
'JF07T32V01')
|
||||
NM=32
|
||||
DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF07.json
|
||||
;;
|
||||
'JF13T01V01')
|
||||
NM=1
|
||||
DET_CONFIG_FILE=/gpfs/photonics/swissfel/buffer/config/stream-JF13.json
|
||||
;;
|
||||
*)
|
||||
NM=1
|
||||
@@ -62,12 +94,47 @@ echo -n "Waited Time : "
|
||||
echo $((date2-date1)) | awk '{print int($1/60)":"int($1%60)}'
|
||||
echo "Started actual retrieve : "`date`
|
||||
|
||||
taskset -c ${coreAssociated} /usr/bin/sf_writer ${OUTFILE} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${NM} ${START_PULSE_ID} ${STOP_PULSE_ID} ${PULSE_ID_STEP}>> /tmp/detector_retrieve.log &
|
||||
if [ ${JF_CONVERSION} == 0 ]
|
||||
then
|
||||
OUTFILE_RAW=${OUTFILE}
|
||||
else
|
||||
if [ ${RAW_FILE} != "None" ]
|
||||
then
|
||||
OUTFILE_RAW=${RAW_FILE}
|
||||
D1=`dirname ${OUTFILE_RAW}`
|
||||
mkdir -p ${D1}
|
||||
else
|
||||
RUN_NUMBER=`basename ${RUN_FILE} | awk -F '.' '{print $1}'`
|
||||
D1=`dirname ${RUN_FILE}`
|
||||
D2=`dirname ${D1}`
|
||||
OUTFILE_RAW=${D2}/.raw/${RUN_NUMBER}.${DETECTOR}.h5
|
||||
mkdir -p ${D2}/.raw/
|
||||
fi
|
||||
fi
|
||||
|
||||
taskset -c ${coreAssociated} /usr/bin/sf_writer ${OUTFILE_RAW} /gpfs/photonics/swissfel/buffer/${DETECTOR} ${NM} ${START_PULSE_ID} ${STOP_PULSE_ID} ${PULSE_ID_STEP}>> /tmp/detector_retrieve.log &
|
||||
|
||||
wait
|
||||
|
||||
coreAssociatedConversion="35,34,33,32,31,30,29,28,27"
|
||||
|
||||
date3=$(date +%s)
|
||||
echo "Finished : "`date`
|
||||
echo -n "Retrieve Time : "
|
||||
echo $((date3-date2)) | awk '{print int($1/60)":"int($1%60)}'
|
||||
|
||||
if [ ${JF_CONVERSION} == 0 ]
|
||||
then
|
||||
echo "File is written in raw format, no compression"
|
||||
else
|
||||
echo "Will call compression/convertion ${OUTFILE_RAW} --> ${OUTFILE}"
|
||||
export PATH=/home/dbe/miniconda3/bin:$PATH
|
||||
source deactivate >/dev/null 2>&1
|
||||
source activate conversion
|
||||
taskset -c ${coreAssociatedConversion} python /home/dbe/git/sf_daq_buffer/scripts/export_file.py ${OUTFILE_RAW} ${OUTFILE} ${RUN_FILE} ${DET_CONFIG_FILE}
|
||||
python /home/dbe/git/sf_daq_buffer/scripts/make_crystfel_list.py ${OUTFILE} ${RUN_FILE}
|
||||
date4=$(date +%s)
|
||||
echo "Finished : "`date`
|
||||
echo -n "Convertion Time : "
|
||||
echo $((date4-date3)) | awk '{print int($1/60)":"int($1%60)}'
|
||||
fi
|
||||
|
||||
Reference in New Issue
Block a user