# dev/script/tools/save_snapshot_mult.py
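# Acquires a series of images from a cam_server camera stream and saves them,
# together with pulse IDs, timestamps and the other stream channels, into the
# current PShell data file. Frames can be persisted in background tasks so
# that acquisition is not blocked by disk I/O.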
import ch.psi.pshell.imaging.ImageBuffer as ImageBuffer
import java.math.BigInteger as BigInteger
# PyArray and time are referenced below; imported explicitly in case the
# PShell startup scope does not already provide them
import org.python.core.PyArray as PyArray
import org.python.core.PyLong as PyLong
import org.python.core.PyFloat as PyFloat
import json
import time
import traceback
import datetime
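# If True, each frame is persisted in a background task created with fork(),
# so the acquisition loop does not wait for the file writing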
PARALLELIZE = True
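# When executed from the UI, use hard-coded test defaults; otherwise take the
# parameters from the command-line arguments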
if get_exec_pars().source == CommandSource.ui:
    camera_name = "simulation_sp"
    shared = False
    images = 10
    interval = -1
    roi = ""    # e.g. "[540, 200, 430, 100]"
else:
    camera_name = args[0]
    shared = args[1].lower() == "true"
    images = int(args[2])
    interval = int(args[3])
    roi = args[4]
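# Example arguments (hypothetical values, in the order parsed above):
#   simulation_sp false 10 100 "[540, 200, 430, 100]"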
set_exec_pars(name="camera_snapshot")
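# Paths of the datasets created inside the output data file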
path_image = "/image"
path_pid = "/pulse_id"
path_timestamp_str = "/timestamp_str"
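# Start the camera stream; 'shared' presumably selects a shared server instance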
cam_server.start(camera_name, shared)
if roi is not None and len(roi.strip()) > 0:
    roi = json.loads(roi)
    cam_server.setRoi(roi[0], roi[2], roi[1], roi[3])
    # Wait until the stream's processing parameters report the requested ROI
    while True:
        cam_server.waitNext(10000)
        r = json.loads(cam_server.stream.take()["processing_parameters"])
        if roi == r["image_region_of_interest"]:
            break
else:
    cam_server.waitNext(10000)
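# Image geometry taken from the current frame; the image dataset is created
# with element type 'f' (float)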
width = cam_server.data.width
height = cam_server.data.height
type_image = 'f'
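# Creates the datasets for the image series, pulse IDs and timestamps, stores
# the processing parameters as attributes of the image dataset, and creates
# one dataset per scalar or array channel present in the stream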
def create_tables(stream_value):
    global width, height, type_image
    create_dataset(path_image, type_image, dimensions=[images, height, width])
    create_dataset(path_pid, 'l', dimensions=[images])
    create_dataset(path_timestamp_str, 's', dimensions=[images])
    for id in stream_value.identifiers:
        val = stream_value.getValue(id)
        if id == "image":
            pass
        elif id == "processing_parameters":
            val = json.loads(val)
            for key in val.keys():
                set_attribute(path_image, key, "" if val[key] is None else val[key])
        elif isinstance(val, PyArray):
            create_dataset("/" + id, 'd', dimensions=[images, len(val)])
        elif isinstance(val, PyLong):
            create_dataset("/" + id, 'l', dimensions=[images])
        elif isinstance(val, PyFloat):
            create_dataset("/" + id, 'd', dimensions=[images])
        else:
            print "Unmanaged stream type: ", val, type(val)
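# Writes one frame at the given index: the image, its pulse ID, a formatted
# timestamp, and the value of every other stream channel. Errors on individual
# channels are printed and skipped so a bad channel does not abort the frame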
def append_frame(data, stream_value, index):
    global path_image, width, height, type_image
    print "Saving frame :", index
    #append_dataset(path_image, data, index, type = type_image)
    append_dataset(path_image, stream_value.getValue("image"), [index, 0, 0], type=type_image, shape=[1, height, width])
    append_dataset(path_pid, stream_value.getPulseId(), index)
    append_dataset(path_timestamp_str, datetime.datetime.fromtimestamp(stream_value.timestampNanos / 1e9).strftime('%Y-%m-%d %H:%M:%S'), index)
    for id in stream_value.identifiers:
        val = None
        try:
            val = stream_value.getValue(id)
            if id == "image":
                pass
            elif isinstance(val, PyArray):
                append_dataset("/" + id, val, index)
            elif isinstance(val, PyLong):
                append_dataset("/" + id, int(val), index)
            elif isinstance(val, PyFloat):
                append_dataset("/" + id, float(val), index)
        except:
            print id, val
            traceback.print_exc()
    print "Saved frame :", index
tasks = []
cam_server.paused = True
try:
    for i in range(images):
        # Create the datasets from the first received stream value
        if i == 0:
            create_tables(cam_server.stream.take())
        start = time.time()
        stream_value = cam_server.stream.take()
        if PARALLELIZE:
            tasks.extend(fork((append_frame, (cam_server.data.matrix, stream_value, i)),))
        else:
            append_frame(cam_server.data.matrix, stream_value, i)
        if i < (images - 1):
            if interval <= 0:
                # No fixed interval: wait for the next frame in the stream cache
                cam_server.stream.waitCacheChange(10000)
            else:
                # Fixed interval: sleep for the remainder of the period
                sleep_time = float(interval) / 1000.0 - (time.time() - start)
                time.sleep(max(sleep_time, 0))
finally:
    cam_server.paused = False
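# Block until all background persisting tasks have completed (the list is
# empty when PARALLELIZE is False, so join() returns immediately)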
print "Waiting finish persisting..."
join(tasks)
print "Done"
# Close the data file now, enforcing the same timestamp on data & image files
set_exec_pars(open = False)
data_file = get_exec_pars().path
set_return(data_file)