Files
sf-op/script/Tools/save_snapshot_mult.py
2017-11-13 10:03:12 +01:00

115 lines
3.8 KiB
Python

import datetime
import json
import time
import traceback

import ch.psi.pshell.imaging.ImageBuffer as ImageBuffer
import java.math.BigInteger as BigInteger
import org.python.core.PyLong as PyLong
import org.python.core.PyFloat as PyFloat
# When True, each frame is persisted in a background task via fork().
PARALLELIZE = True

# Interactive runs (launched from the UI) use hard-coded defaults;
# command-line runs take their parameters from the script arguments.
if get_exec_pars().source == CommandSource.ui:
    camera_name, shared, images, interval = "SLG-LCAM-C041_sp", False, 10, 1000
else:
    camera_name = args[0]
    shared = args[1].lower() == "true"
    images = int(args[2])
    interval = int(args[3])

set_exec_pars(name="camera_snapshot")

# Dataset paths inside the output data file.
path_image = "/image"
path_pid = "/pulse_id"
path_timestamp_str = "/timestamp_str"

snapshotFile = None

# Start the camera stream; shared instances use the "_sp1"-suffixed name.
if shared:
    cam_server.start(camera_name + "_sp1", shared)
else:
    cam_server.start(camera_name)
def append_frame(data, stream_value, index):
global path_image
print "Saving frame :", index
append_dataset(path_image, data, index)
append_dataset(path_pid, stream_value.getPulseId(), index)
append_dataset(path_timestamp_str, datetime.datetime.fromtimestamp(stream_value.timestampNanos/1e9).strftime('%Y-%m-%d %H:%M:%S'), index)
for id in stream_value.identifiers:
try:
val = stream_value.getValue(id)
if id == "image":
pass
elif isinstance(val, PyArray):
append_dataset("/"+id, val, index)
elif isinstance(val, PyLong):
append_dataset("/"+id, int(val), index)
elif isinstance(val, PyFloat):
append_dataset("/"+id, float(val), index)
else:
pass
except:
print id, val
traceback.print_exc()
print "Saved frame :", index
tasks = []
cam_server.waitNext(10000)
for i in range(images):
if i==0:
stream_value = cam_server.stream.take()
width = cam_server.data.width
height = cam_server.data.height
create_dataset(path_image, 'd', dimensions = [height, width, images])
create_dataset(path_pid, 'l', dimensions = [images])
create_dataset(path_timestamp_str, 's', dimensions = [images])
for id in stream_value.identifiers:
val = stream_value.getValue(id)
if id == "image":
pass
elif id == "processing_parameters":
val = json.loads(val)
for key in val.keys():
set_attribute(path_image, key, "" if val[key] is None else val[key] )
elif isinstance(val, PyArray):
create_dataset("/"+id, 'd', dimensions = [images, len(val)])
elif isinstance(val, PyLong):
create_dataset("/"+id, 'l', dimensions = [images])
elif isinstance(val, PyFloat):
create_dataset("/"+id, 'd', dimensions = [images])
else:
print "Unmanaged stream type: ", val, type(val)
pass
start = time.time()
stream_value = cam_server.stream.take()
if PARALLELIZE:
tasks.extend( fork((append_frame,(cam_server.data.matrix,stream_value, i)),) )
else:
append_frame(cam_server.data.matrix, stream_value, i)
if i< (images-1):
if interval<=0:
cam_server.waitNext(10000)
else:
sleep_time = float(interval)/1000.0 - (time.time()-start)
if (sleep_time>0):
print "Sleeping ", sleep_time
time.sleep(sleep_time)
print "Waiting finish persisting..."
join(tasks)
print "Done"
#Enforce the same timestamp to data & image files.
set_exec_pars(open = False)
data_file = get_exec_pars().path
set_return(data_file)