added most up-to-date versions of the SwissMX tools/utils

This commit is contained in:
Appleby Martin Vears
2025-06-13 13:44:40 +02:00
parent a67321d959
commit 5a424db2ca
13 changed files with 1650 additions and 0 deletions

View File

@@ -0,0 +1,322 @@
import sys,os,socket
import argparse
sys.path.insert(0, os.path.expanduser('/sf/cristallina/applications/slic/slic-package'))
from slic.devices.timing.events import CTASequencer
class IntRange:
def __init__(self, imin=None, imax=None):
self.imin = imin
self.imax = imax
def __call__(self, arg):
try:
value = int(arg)
except ValueError:
raise self.exception()
if (self.imin is not None and value < self.imin) or (self.imax is not None and value > self.imax):
raise self.exception()
return value
def exception(self):
if self.imin is not None and self.imax is not None:
return argparse.ArgumentTypeError(f"ON OFF ratio must be an integer in the range [{self.imin}, {self.imax}]")
elif self.imin is not None:
return argparse.ArgumentTypeError(f"ON OFF ratio must be an integer >= {self.imin}")
elif self.imax is not None:
return argparse.ArgumentTypeError(f"ON OFF ratio must be an integer <= {self.imax}")
else:
return argparse.ArgumentTypeError("ON OFF ratio must be an integer")
class tuple_splitter:
def __init__(self, x=None, y=None, ssz=False):
self.x=x
self.y=y
self.ssz=ssz
def __call__(self,arg):
try:
self.x = int(arg.split(',')[0])
self.y = int(arg.split(',')[1])
except ValueError:
raise self.exception()
if self.ssz and self.y != self.x-1:
raise self.exception()
return (self.x,self.y)
def exception(self):
if self.ssz and self.y != self.x-1:
return argparse.ArgumentTypeError(f"for current motion scheme x,y should equal x, x-1 not {x}, {y}")
else:
if self.ssz:
return argparse.ArgumentTypeError("ssz x and y must be an integer")
else:
return argparse.ArgumentTypeError("x and y must be an integer")
class MotionSim:
def __init__(self, args=None):
try:
self.cta=CTASequencer(args.cta_sequencer)
self.clear_flag=args.clear
self.check_sequence=args.check_sequence
self.motion_mode=args.motion_mode
self.repetitions=args.grid_size[0]
self.cta_multiplier=args.grid_size[1]
self.number_of_appertures=self.repetitions*self.cta_multiplier
self.on_off_ratio=args.on_off_ratio+1
self.triggered=args.triggered
self.ssz=args.ssz
self.smv=args.smv
self.twait=args.twait
self.tmove=args.tmove
except:
self.clear_flag=False
self.check_sequence=False
self.cta=CTASequencer("SAR-CCTA-ESC")
self.motion_mode=4
self.repetitions=1
self.cta_multiplier=1
self.number_of_appertures=1
self.on_off_ratio=1
self.triggered=False
raise self.exception(args)
def exception(self, args=None):
if args is not None:
print(args)
return argparse.ArgumentTypeError("problem with arg parse inputs")
else:
return argparse.ArgumentTypeError("no args given, using default values")
def standard_motion(self):
#need to implement on_off_ratio
self.cta.seq[214]=[1,]+[0,]*(self.cta_multiplier-1) #start motion
self.cta.seq[200]=[1,]*self.cta_multiplier #uncomment me for normal operation
self.cta.seq[219]=[1,]*self.cta_multiplier #trigger detector
if self.triggered:
self.cta.seq[215]=[1,0]*(self.cta_multiplier//2) # shutter always open
self.cta.seq[216]=[1,0,]*(self.cta_multiplier//2) # Label image light dark 1:1 #change back to 1,0 for normal on off measurements
self.n_pulses_run = len(self.cta.seq[200])*self.repetitions
self.cta.cfg.repetitions = self.repetitions
self.upload()
return
def hit_and_return(self):
if self.smv:
transition_pulses=self.smv
else:
transition_pulses=self.ssz[0]-1
apperture_group_size = self.ssz[0] * self.ssz[1]
number_of_apperture_groups = self.number_of_appertures // apperture_group_size
self.repetitions = number_of_apperture_groups # number of repetitions is now the number of times we repeat the loop
xray_sequence = [0,] * apperture_group_size + [1,] * apperture_group_size + [0,] * (transition_pulses - 1)
self.cta.seq[200] = xray_sequence
self.cta.seq[214] = [1,] + [0,] * (apperture_group_size-1) + [0,] * apperture_group_size + [0,] * (transition_pulses - 1) #start motion
if self.triggered:
if self.on_off_ratio > apperture_group_size:
self.on_off_ratio = apperture_group_size
on_off_sequence = [1,] * (self.on_off_ratio - 1) + [0,]
self.cta.seq[215] = on_off_sequence * (apperture_group_size//self.on_off_ratio) + [0,] * apperture_group_size + [0,] * (transition_pulses -1) # Trigger 1:1
repeat_sequence_laser_closed = [1, ] + [0,] * (apperture_group_size - 1)
repeat_sequence_laser_open = [1,] + [0,] * (apperture_group_size - 1)
transition_sequence_laser_shut = [0,] * (transition_pulses - 1) #close shutter after final point before starting move...?
self.cta.seq[213] = repeat_sequence_laser_open + repeat_sequence_laser_closed + transition_sequence_laser_shut # Trigger 1:1
self.cta.seq[216] = [0,] * apperture_group_size + on_off_sequence * (apperture_group_size//self.on_off_ratio) + [0,] * (transition_pulses - 1)
else:
self.cta.seq[215] = [0] * apperture_group_size * 2 + [0,] * (transition_pulses - 1) # trigger (laser_shutter or droplet ejector)
self.cta.seq[216] = [0] * apperture_group_size * 2 + [0,] * (transition_pulses -1) # image label (on or off)1
self.cta.seq[219] = xray_sequence # detector trigger
self.cta.cfg.repetitions = self.repetitions
self.upload()
return
def stop_and_go(self):
if self.tmove==0:
print("tmove is 0 (did you mean 10?), setting wait_pulses to 0 to avoid division by zero")
wait_pulses=0
else:
wait_pulses=self.twait//self.tmove
xray_sequence = [0,] * wait_pulses + [1,]
self.cta.seq[200] = xray_sequence * self.cta_multiplier # x-ray_shutter
self.cta.seq[214] = [1,] + [0,] * (len(xray_sequence * self.cta_multiplier) -1) #start motion
if self.triggered:
trigger_on = [1,] + [0,] * wait_pulses
trigger_off = [0,] + [0,] * wait_pulses
trigger_sequence = trigger_off + trigger_on * (self.on_off_ratio-1)
image_on = [0,] * wait_pulses + [1,]
image_label_sequence = [0,] * wait_pulses + [0,] + image_on * (self.on_off_ratio-1)
self.cta.seq[215] = trigger_sequence * (self.cta_multiplier//self.on_off_ratio) # trigger (laser_shutter or droplet ejector)
self.cta.seq[216] = image_label_sequence * (self.cta_multiplier//self.on_off_ratio) # image label (on or off)
else:
no_trigger_sequence = [0,] + [0,] * wait_pulses
self.cta.seq[215] = no_trigger_sequence * self.cta_multiplier # x-ray_shutter # trigger (laser_shutter or droplet ejector)
self.cta.seq[216] = no_trigger_sequence * self.cta_multiplier # x-ray_shutter # image label (on or off)1
self.cta.seq[219] = xray_sequence * self.cta_multiplier # detector trigger
self.cta.cfg.repetitions = self.repetitions
self.upload()
return
def upload(self):
self.cta.seq.upload()
return
def clear(self):
self.cta.seq[200]=[1,]
self.upload()
return
def single_event(self, event:int):
#self.cta.seq[200]=[0,]*99+[1,]
self.cta.seq[event]=[0,]*99+[1,]
self.upload()
return
def check(self):
self.cta.seq.download()
print(self.cta.seq)
try:
print(f"length of sequence: {len(self.cta.seq[200])}")
except Exception as e:
print(f"Exception {e}")
return
def main(args):
ctaSeq = MotionSim(args)
if args.clear:
print("Clearing the CTA (has to have an event so setss [200] to [1]")
ctaSeq.clear()
return
elif args.check_sequence:
print("Checking the current CTA sequence")
ctaSeq.check()
return
elif args.droplet:
print("Clearing the CTA and priming a single event of [200] and [215] for aligning the droplet ejector")
event=215
ctaSeq.single_event(event)
elif args.shutter:
print("Clearing the CTA and priming a single event of [200] and [213] for aligning the droplet ejector")
event=213
ctaSeq.single_event(event)
elif args.motion_mode == 1 or args.motion_mode == 2 or args.motion_mode == 3:
print("probably doesnt use or doesn't need the cta. But would use a similar scheme to motion_mode 4")
ctaSeq.standard_motion()
event=200
elif args.motion_mode == 4:
ctaSeq.standard_motion()
event=200
elif args.motion_mode == 5:
ctaSeq.stop_and_go()
event=200
elif args.motion_mode == 6:
ctaSeq.hit_and_return()
event=200
else:
print('nothing happened')
return
n_pulses_run = len(ctaSeq.cta.seq[event])*ctaSeq.repetitions
bsdata_scalar = n_pulses_run/ctaSeq.number_of_appertures
print(f"uploading sequence to cta of {n_pulses_run} pulses. Sequence length: {len(ctaSeq.cta.seq[200])} for one event. Repetitions: {ctaSeq.repetitions}.")
print(f"BS data would be scaled by a factor of {bsdata_scalar}.")
ctaSeq.upload()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"-c",
"--clear",
help="replace current cta sequence",
action='store_true',
)
parser.add_argument(
"-m",
"--motion_mode",
help="Mimic cta code generation for specific motion code, limit:1-6. Mode:1 point list, mode:2 unused, mode 3: inverse list, mode 4: DEFAULT (short code), motion 5: stop and go, motion 6: hit and return",
type=IntRange(1,6),
default=4
)
parser.add_argument(
"-g",
"--grid_size",
help="size of grid, default 162,162",
type=tuple_splitter(ssz=False),
default='162,162'
)
parser.add_argument(
"-o",
"--on_off_ratio",
help="the number of on shots (i.e. laser triggers, drolet ejector) per single dark shot. i.e. 1 would give you a 1 on: 1 off ratio. limit:1-100, Note for hit and return the ",
type=IntRange(1,100),
default=1
)
parser.add_argument(
"-w",
"--twait",
help="input the wait time in multiples of 10 i.e. 10 ms, 20 ms.",
type=int,
default=10
)
parser.add_argument(
"-s",
"--ssz",
help="needed for motion mode 6, size fo the repeating grid i.e. 4x3, 8x7. Currently limited in real motion to x,x-1 due to smv requirement",
type=tuple_splitter(ssz=True),
default=None
)
parser.add_argument(
"--check_sequence",
help="print current cta sequence",
action='store_true',
)
parser.add_argument(
"-t",
"--triggered",
help="print current cta sequence",
action='store_true',
)
parser.add_argument(
"--tmove",
help="time between moves for motion mode 5 = stop and go, should be 10",
type=int,
default=10
)
parser.add_argument(
"--smv",
help="# of pulses to move between repeats in x and y i.e 3,3 would be 3 pulses to change in x and 3 pulses to change in y. Default value x-1. Currently must be same in x and y due to cta limitations.",
type=int,
default=None
)
parser.add_argument(
"--cta_sequencer",
help="name of sequencer PV, default = SAR-CCTA-ESC",
type=str,
default="SAR-CCTA-ESC"
)
parser.add_argument(
"--droplet",
help="for aligning droplet ejector, event 215 at 100 hz",
action='store_true',
)
parser.add_argument(
"--shutter",
help="for aligning laser during hit and return, event 213 at 100 hz",
action='store_true',
)
args = parser.parse_args()
if args.motion_mode == 5 and args.twait is None:
parser.error("--motion_mode = 5 requires twait to define wait time at each point.")
if args.motion_mode == 6 and args.ssz is None:
parser.error("--motion_mode = 6 requires ssz to define repeating grid.")
main(args)
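
For orientation, a minimal stand-alone sketch of the pulse pattern that stop_and_go() builds (no CTA hardware involved; the twait, tmove, on/off ratio and grid multiplier values below are hypothetical examples):

    # Sketch of the stop-and-go pattern construction used above (example values only).
    twait, tmove = 50, 10            # wait time and move time in ms
    on_off_ratio = 2 + 1             # args.on_off_ratio + 1, as in MotionSim.__init__
    cta_multiplier = 6               # grid_size[1]; chosen here to divide evenly by on_off_ratio

    wait_pulses = twait // tmove     # idle pulses between x-ray shots
    xray_sequence = [0] * wait_pulses + [1]
    trigger_on = [1] + [0] * wait_pulses
    trigger_off = [0] + [0] * wait_pulses
    trigger_sequence = trigger_off + trigger_on * (on_off_ratio - 1)

    print(xray_sequence * cta_multiplier)                       # events 200/219 (x-ray shutter + detector)
    print(trigger_sequence * (cta_multiplier // on_off_ratio))  # event 215 (laser/droplet trigger)

Both printed lists have the same length, which is what keeps the CTA events aligned.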

276
cta/list_sort_clara.py Normal file
View File

@@ -0,0 +1,276 @@
import h5py
import sys, os
import argparse
import numpy as np
from pathlib import Path
sys.path.insert(0, os.path.expanduser('/sf/cristallina/applications/mx/slic'))
def sort_event_laser(evt_set, ind_JF, ind_BS):
#sort by event 63 logic
index_dark = []
index_light = []
blanks = []
for idx_JF, idx_BS in zip(ind_JF, ind_BS):
events = evt_set[idx_BS]
if events[63] and events[200]:
index_dark.append(idx_JF)
elif events[200]:
index_light.append(idx_JF)
else:
print(f"unknown event: {events}")
blanks.append(idx_JF)
return index_dark, index_light, blanks
def sort_event_cta(evt_set, trigger_event, ind_JF, ind_BS):
#sort by cta event logic i.e. 215/216
index_dark = []
index_light = []
blanks = []
for idx_JF, idx_BS in zip(ind_JF, ind_BS):
events = evt_set[idx_BS]
if events[trigger_event] and events[200]:
index_light.append(idx_JF)
elif events[200]:
index_dark.append(idx_JF)
else:
print(f"unknown event: {events}")
blanks.append(idx_JF)
return index_dark, index_light, blanks
def sort_jfjoch_header(jf_data, pulseids_JF):
index_dark = []
index_light = []
blanks = []
evt_set_jfj = jf_data[f"/entry/xfel/eventCode"][:]
n_pulse_id = len(pulseids_JF)
for idx_JF in range(n_pulse_id):
events = evt_set_jfj[idx_JF]
if events == 15:
index_dark.append(idx_JF)
elif events == 13:
index_light.append(idx_JF)
else:
print(f"unknown event: {events}")
blanks.append(idx_JF)
return index_dark, index_light, blanks
def sort_bsdata(data_directory, event, jf_data, pulseids_JF):
for acquisition in range(1,27):
bsdata_path = os.path.join(data_directory,'data','acq{0:0=4d}.BSDATA.h5'.format(acquisition))
try:
#bsdata = SFDataFiles(bsdata_path)
bsdata = h5py.File(bsdata_path, "r") #r"/sf/cristallina/data/p21630/raw/run0065-lov_movestop_normal_1/data/acq0001.BSDATA.h5"
except Exception as e:
print(f"didn't open bsdata due to error {e}")
return
pulseids_BS = bsdata[f"/SAR-CVME-TIFALL6:EvtSet/pulse_id"][:]
evt_set=bsdata[f"/SAR-CVME-TIFALL6:EvtSet/data"][:]
_inters, ind_JF, ind_BS = np.intersect1d(pulseids_JF, pulseids_BS, return_indices=True)
if event == 63:
index_dark, index_light, blank = sort_event_laser(evt_set, ind_JF, ind_BS)
else:
index_dark, index_light, blank = sort_event_cta(evt_set, event, ind_JF, ind_BS)
bsdata.close()
return index_dark, index_light, blank
def sort_acquisition(data_directory, acquisition, output_name, event, jfjoch_header):
detector='JF17T16V01'
jf_path= os.path.join(data_directory,'data','acq{0:0=4d}.{1}.h5'.format(acquisition,detector))
print(jf_path)
try:
jf_data = h5py.File(jf_path, "r")
except Exception as e:
print(f"didn't open JF17T16V01.h5 due to error {e}")
return
pulseids_JF = jf_data[f"/entry/xfel/pulseID"][:]
if jfjoch_header:
index_dark, index_light, blank = sort_jfjoch_header(jf_data, pulseids_JF)
else:
index_dark, index_light, blank = sort_bsdata(data_directory, event, jf_data, pulseids_JF)
jf_data.close()
acq_dark = []
acq_light = []
acq_blank = []
delim = "//"
if index_dark:
for frame_number in index_dark:
acq_dark.append(f"{jf_path} {delim}{frame_number}")
if index_light:
for frame_number in index_light:
acq_light.append(f"{jf_path} {delim}{frame_number}")
if blank:
for frame_number in blank:
acq_blank.append(f"{jf_path} {delim}{frame_number}")
return acq_light, acq_dark, acq_blank
def generate_list_file_all(output_name, index_dark=None, index_light=None, blanks=None):
#write the combined per-run list files covering all acquisitions
if index_dark:
file_dark = output_name + ".dark.lst"
print(f"List of dark frames : {file_dark} , {len(index_dark)} frames")
with open(file_dark, "w") as f_list:
for frame in index_dark:
print(f"{frame}", file = f_list)
if index_light:
file_light = output_name + ".light.lst"
print(f"List of light frames : {file_light} , {len(index_light)} frames")
with open(file_light, "w") as f_list:
for frame in index_light:
print(f"{frame}", file = f_list)
if blanks:
file_blank = output_name + ".blanks.lst"
print(f"List of blank frames : {file_blank} , {len(blanks)} frames")
with open(file_blank, "w") as f_list:
for frame in blanks:
print(f"{frame}", file = f_list)
def generate_list_files(output_name, index_dark=None, index_light=None, blanks=None, acquisition=None):
delim = "//"
acq='acq{0:0=4d}'.format(acquisition)
if index_dark:
dir_dark=f"{output_name}_off"
file_dark = f"{dir_dark}/{acq}.off.lst"
Path(dir_dark).mkdir(parents=True, exist_ok=True)
print(f"List of dark frames : {file_dark} , {len(index_dark)} frames")
with open(file_dark, "w") as f_list:
for frame in index_dark:
print(f"{frame}", file = f_list)#{data_file} {delim}{frame_number}
if index_light:
dir_light=f"{output_name}_on"
Path(dir_light).mkdir(parents=True, exist_ok=True)
file_light = f"{dir_light}/{acq}.on.lst"
print(f"List of light frames : {file_light} , {len(index_light)} frames")
with open(file_light, "w") as f_list:
for frame in index_light:
print(f"{frame}", file = f_list)#{data_file} {delim}{frame_number}
if blanks:
dir_blank=f"{output_name}_blank"
Path(dir_blank).mkdir(parents=True, exist_ok=True)
file_blank = f"{dir_blank}/{acq}.blanks.lst"
print(f"List of blank frames : {file_blank} , {len(blanks)} frames")
with open(file_blank, "w") as f_list:
for frame in blanks:
print(f"{frame}", file = f_list)#{data_file} {delim}{frame_number}
def main(args):
data_directory = args.input_data_directory
event = args.event_label_trigger
jfjoch_header=args.jfjoch_header
if args.output_name:
output_name = args.output_name
else:
output_name = data_directory.split('/')[-1]
number_of_acqusitions = os.listdir(os.path.join(data_directory,'meta'))
index_light_all=[]
index_dark_all=[]
blanks_all = []
for i in range(1,len(number_of_acqusitions)):
acq_light, acq_dark, acq_blank = sort_acquisition(data_directory, i, output_name, event, jfjoch_header)
generate_list_files(output_name, acq_dark, acq_light, acq_blank, acquisition=i)
index_light_all+=(acq_light)
index_dark_all+=(acq_dark)
blanks_all+=(acq_blank)
generate_list_file_all(output_name, index_dark_all, index_light_all, blanks_all)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"-i",
"--input_data_directory",
help="raw data directory and run i.e. /sf/cristallina/data/p21630/raw/run0065-lov_movestop_normal_1/",
type=os.path.abspath,
required=True
)
parser.add_argument(
"-o",
"--ouput_name",
help="the name of the job to be done. if not included file names will be automatically generated based on input run name",
type=str,
)
parser.add_argument(
"-j",
"--jfjoch_header",
help="flag to sort by jfjoch_header",
action='store_true',
)
parser.add_argument(
"-e",
"--event_label_trigger",
help="number of event trigger i.e. 63, 216.",
type=int,
default=216
)
args = parser.parse_args()
main(args)
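
The core of the sorting above is the pulse-ID match between the JUNGFRAU file and the BSDATA event codes; a self-contained sketch of that step with synthetic arrays (the pulse IDs and event-code values below are invented, event numbers 200/216 as in the script):

    import numpy as np

    # Synthetic pulse IDs: the JF file and the BSDATA file overlap but are not identical.
    pulseids_JF = np.arange(1000, 1010)
    pulseids_BS = np.arange(1003, 1013)
    # Synthetic event-code rows (256 codes per pulse): 200 = x-ray on, 216 = "light" label.
    evt_set = np.zeros((len(pulseids_BS), 256), dtype=int)
    evt_set[:, 200] = 1
    evt_set[::2, 216] = 1

    _common, ind_JF, ind_BS = np.intersect1d(pulseids_JF, pulseids_BS, return_indices=True)
    index_light = [i_jf for i_jf, i_bs in zip(ind_JF, ind_BS) if evt_set[i_bs][216] and evt_set[i_bs][200]]
    index_dark = [i_jf for i_jf, i_bs in zip(ind_JF, ind_BS) if not evt_set[i_bs][216] and evt_set[i_bs][200]]
    print(index_light, index_dark)   # JF frame numbers that end up in the .on / .off list files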

View File

@@ -0,0 +1,112 @@
import h5py
import sys, os
import argparse
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
def sort_acquisition(data_directory, acquisition):
detector='JF17T16V01'
bsdata_path = os.path.join(data_directory,'data','acq{0:0=4d}.BSDATA.h5'.format(acquisition))
try:
f = h5py.File(bsdata_path, "r") #r"/sf/cristallina/data/p21630/raw/run0065-lov_movestop_normal_1/data/acq0001.BSDATA.h5"
except Exception as e:
print("didn't open bsdata due to error {e}") #_logger.error(f"Cannot open {data_file} (due to {e})")
return
jf_path=os.path.join(data_directory,'data','acq{0:0=4d}.{1}.h5'.format(acquisition, detector))
print(jf_path)
try:
#r"/sf/cristallina/data/p21630/raw/run0065-lov_movestop_normal_1/data/acq0001.JF17T16V01.h5"
x = h5py.File(jf_path, "r")
except Exception as e:
print("didn't open JF17T16V01.h5 due to error {e}") #_logger.error(f"Cannot open {data_file} (due to {e})")
return
pulseids_JF = x[f"/entry/xfel/pulseID"][:]
pulseids_BS = f[f"/SAR-CVME-TIFALL6:EvtSet/pulse_id"][:]
power_meter = pd.DataFrame(f[f"/SARES33-GPS:PR1_CH0_VAL_GET/data"][:], columns=['data']) #gass monitor
power_meter.index = f[f"/SARES33-GPS:PR1_CH0_VAL_GET/pulse_id"][:]
pulseids_psss = f[f"/SARFE10-PSSS059:SPECTRUM_Y_SUM/pulse_id"][:] #Group PSSS
evt_set=pd.DataFrame(f[f"/SAR-CVME-TIFALL6:EvtSet/data"][:])
evt_set.index = f[f"/SAR-CVME-TIFALL6:EvtSet/pulse_id"][:]
common_pulses = power_meter.index.intersection(evt_set.index)
power_meter= power_meter.loc[common_pulses]
common_pulses = evt_set.index.intersection(power_meter.index)
evt_set = evt_set.loc[common_pulses]
print(power_meter.index.duplicated())
dups = power_meter.index.duplicated()
for i in range(len(power_meter.index)):
if dups[i]:
print(power_meter.index[i])
fig, ax1 = plt.subplots()
color = 'tab:red'
ax1.set_xlabel('pulse_id')
ax1.set_ylabel('event 63', color=color)
ax1.scatter(common_pulses, evt_set.loc[common_pulses][evt_set.columns[63]], color=color)
ax1.tick_params(axis='y', labelcolor=color)
ax1.invert_yaxis()
ax2 = ax1.twinx() # instantiate a second Axes that shares the same x-axis
color = 'tab:blue'
ax2.set_ylabel('gas monitor (a.u.)', color=color) # we already handled the x-label with ax1
ax2.scatter(common_pulses, power_meter.loc[common_pulses]['data'].values, color=color)
ax2.tick_params(axis='y', labelcolor=color)
fig.tight_layout() # otherwise the right y-label is slightly clipped
plt.show()
plt.scatter(power_meter['data'].loc[common_pulses].values, evt_set.loc[common_pulses][evt_set.columns[63]])
#plt.scatter(common_pulses, power_meter['data'].values[1:])
plt.show()
def main(args):
data_directory = args.input_data_directory
if args.output_name:
output_name = args.output_name
else:
output_name = data_directory.split('/')[-1]
number_of_acqusitions = os.listdir(os.path.join(data_directory,'meta'))
index_light=[]
index_dark=[]
blanks = []
for i in range(1, len(number_of_acqusitions) + 1):
print(i)
sort_acquisition(data_directory, i)
#index_light+=(acq_light)
#index_dark+=(acq_dark)
#blanks+=(acq_blank)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"-i",
"--input_data_directory",
help="raw data directory and run i.e. /sf/cristallina/data/p21630/raw/run0065-lov_movestop_normal_1/",
type=os.path.abspath,
required=True
)
parser.add_argument(
"-o",
"--ouput_name",
help="the name of the job to be done. if not included file names will be automatically generated based on input run name",
type=str,
)
parser.add_argument(
"-l",
"--log_file_name",
help="the name of the logger.",
type=str,
default='logging_logger_of_logs.log'
)
args = parser.parse_args()
#logfile = args.log_file_name
#logger.add( logfile, format="{message}", level="INFO")
main(args)
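
The pulse-ID alignment above relies on pandas index intersection; a tiny self-contained illustration (channel names and values below are invented):

    import pandas as pd

    # Two channels recorded at different rates, both indexed by pulse_id.
    power = pd.DataFrame({"data": [0.8, 0.9, 1.1]}, index=[100, 102, 104])
    evts = pd.DataFrame({63: [1, 0, 1, 0]}, index=[100, 101, 102, 103])

    common = power.index.intersection(evts.index)   # pulses present in both channels
    print(power.loc[common]["data"].values, evts.loc[common][63].values)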

27
data/retrieve_data.py Normal file
View File

@@ -0,0 +1,27 @@
import sys, os
print('starting slic')
sys.path.insert(0, os.path.expanduser('/sf/cristallina/applications/mx/_obsolete/slic'))
from slic.utils import json_load, json_save
from slic.core.acquisition.broker.post_retrieve import post_retrieve_acq_jsons, API_ADDR
fn = "/sf/cristallina/data/p22233/raw/run0067-ocp_ech_420nm_1us_trans35_690nJ/meta/acq0006.json"
pulses_per_file=1000
n_missing = 7
acquisition = 20
print('starting loop')
file_names=[]
for i in range(1,n_missing+1):
acq='acq{0:0=4d}'.format(acquisition+i)
d = json_load(fn)
d["start_pulseid"] += i * pulses_per_file
d["stop_pulseid"] += i * pulses_per_file
print(acq)
file_name = f"{acq}.json"
print(file_name)
file_names.append(file_name)
json_save(d,file_name)
post_retrieve_acq_jsons(API_ADDR, file_names, continue_run=True)
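
A minimal sketch of the pulse-ID arithmetic the retrieval script relies on (the reference window and counts below are hypothetical): each missing acquisition is the reference JSON shifted by a whole number of pulses_per_file.

    # Hypothetical reference window, as would be read from an existing acq json.
    reference = {"start_pulseid": 1_000_000, "stop_pulseid": 1_000_999}
    pulses_per_file = 1000

    for i in range(1, 4):   # three missing acquisitions after the reference one
        window = dict(reference)
        window["start_pulseid"] += i * pulses_per_file
        window["stop_pulseid"] += i * pulses_per_file
        print(f"acq{20 + i:04d}: {window}")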

166
edge_scan/edge_scan.py Normal file
View File

@@ -0,0 +1,166 @@
#!/usr/bin/env python3
# authors M.Appleby
"""
# aim
Calculate FWHM of x-ray beam from an edge scan
# protocol
complete edge scan
## IMPORTANT ##
- save data as .txt file
# usage
python edge_scan.py -i <path to scan file> -o <output path for figure> -m <X or Y>
# output
creates a .png of fit including FWHM title
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy import stats
from scipy.optimize import curve_fit
from scipy.signal import peak_widths, find_peaks
from math import sqrt, pi
import argparse
def gaus(x, *p):
A, mu, sigma = p
return A * np.exp(-(x-mu)**2/(2.*sigma**2))
def sigmoid(x, *p):
L, x0, k, b = p
return L / (1 + np.exp(-k * (x -x0))) + b
def getFWHM(filename, output_name, motor):
motor = motor.capitalize()
df = pd.read_csv(filename, sep='\t')
#df = df[ [0,1] ]
df = df.set_index( f"SAR-EXPMX:MOT_F{motor}.VAL", drop=True )
print(df)
print(df.columns)
x_label=df.index.name
y_label=df.columns[0]
x_vals = df.index.to_numpy()
y_vals = df[y_label].to_numpy()
plt.plot(x_vals, y_vals, label = 'edge_scan')
plt.ylabel('Intensity (counts)')
plt.xlabel('Motor position (mm)')
L_guess = np.max(y_vals)
b_guess = np.min(y_vals)
x0_guess = x_vals[np.argmin(np.abs(y_vals - (L_guess + b_guess) / 2))]
k_guess = 1 / (x_vals[-1] - x_vals[0])
s0 = [L_guess, x0_guess, k_guess, b_guess]
params, params_covariance = curve_fit(sigmoid, x_vals, y_vals, p0=s0)
#L_fit, x0_fit, k_fit, b_fit = params
plt.plot(x_vals, y_vals)
plt.plot(x_vals, sigmoid(x_vals, *params))
plt.show()
y_fit = sigmoid(x_vals, *params)
dydx = np.gradient(y_fit, x_vals)
if motor == 'Y':
dydx=-dydx
mean = x_vals[np.argmax(dydx)]
sigma = (np.max(x_vals) - np.min(x_vals))/4
print(mean, sigma)
A= np.max(dydx)
print(x_vals, '\\', y_vals)
print(dydx)
dydx[np.isnan(dydx)] = 0
print(dydx)
p0 = [A, mean, sigma]
parameters, covariance = curve_fit( gaus, x_vals, dydx, p0=p0 )
x0_op = parameters[1]
sigma_op = parameters[2]
print(parameters)
gauss_y = gaus(x_vals,*parameters)
FWHM_x = np.abs(2*np.sqrt(2*np.log(2))*sigma_op)
plt.plot(x_vals, dydx, label = 'derivative')
plt.plot(x_vals, gauss_y,label='Gaussian fit',color ='orange')
plt.fill_between(x_vals,gauss_y,color='orange',alpha=0.5)
plt.axvspan(x0_op+FWHM_x/2,x0_op-FWHM_x/2, color='green', alpha=0.75, lw=0, label='FWHM = {0}'.format(FWHM_x))
#print(FWHM_x)
#plt.plot(x_label, y_label, data=df)
plt.legend()
plt.show()
# find peak centre
peaks = find_peaks( gauss_y )
fwhm_peak = peak_widths( gauss_y, peaks[0], rel_height=0.5 )
fwhm_str = x_vals[int( round( fwhm_peak[2][0], 0 ))]
fwhm_end = x_vals[int( round( fwhm_peak[3][0], 0 ))]
fwhm = fwhm_str - fwhm_end
fwhm_height = gauss_y[int( round( fwhm_peak[2][0], 0 ))]
# find 1/e2 peak width
full_peak = peak_widths( gauss_y, peaks[0], rel_height=0.865 )
full_str = x_vals[int( round( full_peak[2][0], 0 ))]
full_end = x_vals[int( round( full_peak[3][0], 0 ))]
full = full_str - full_end
full_height = gauss_y[int( round( full_peak[2][0], 0 ))]
print( "FWHM = {0}".format( fwhm ) )
print(FWHM_x)
print( "1/e2 = {0}".format( full ) )
#plt.savefig(output_name+filename.split('/')[-1]+'FWHM_{0}.png'.format(FWHM_x))
return
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
"-p",
"--path",
help="path of input and output file, not currently in use",
type=str,
default="/sf/cristallina/data/p21224/res/pshell/edge_scans/"
)
parser.add_argument(
"-i",
"--input",
help="location of input file",
type=str,
default="/sf/cristallina/data/p21224/res/pshell/edge_scans/0.5_x/0.5_x"
)
parser.add_argument(
"-o",
"--output",
help="output path to save figure",
type=str,
default="/sf/cristallina/data/p21224/res/pshell/edge_scans/"
)
parser.add_argument(
"-m",
"--motor",
help="X or Y",
type=str,
default="X"
)
args = parser.parse_args()
getFWHM(args.input, args.output, args.motor)
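
A quick numerical check of the FWHM relation used above, FWHM = 2*sqrt(2*ln 2)*sigma (about 2.355*sigma), on a synthetic knife-edge rather than measured data:

    import numpy as np
    from scipy.optimize import curve_fit
    from scipy.special import erf

    def gaus(x, A, mu, sigma):
        return A * np.exp(-(x - mu) ** 2 / (2.0 * sigma ** 2))

    # Synthetic edge scan: integral of a Gaussian beam profile with sigma = 0.02 mm.
    true_sigma = 0.02
    x = np.linspace(-0.2, 0.2, 201)
    edge = 0.5 * (1 + erf(x / (true_sigma * np.sqrt(2))))

    dydx = np.gradient(edge, x)                      # derivative recovers the beam profile
    p, _ = curve_fit(gaus, x, dydx, p0=[dydx.max(), 0.0, 0.05])
    fwhm = 2 * np.sqrt(2 * np.log(2)) * abs(p[2])
    print(fwhm, 2.355 * true_sigma)                  # both ~0.047 mm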

View File

@@ -0,0 +1,166 @@
#!/usr/bin/env python3
# authors M.Appleby
"""
# aim
Calculate FWHM of x-ray beam from an edge scan
# protocol
complete edge scan
## IMPORTANT ##
- save data as .txt file
# usage
python <script name> -i <path to scan file> -o <output path for figure> -m <X or Y>
# output
creates a .png of fit including FWHM title
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy import stats
from scipy.optimize import curve_fit
from scipy.signal import peak_widths, find_peaks
from math import sqrt, pi
import argparse
def gaus(x, *p):
A, mu, sigma = p
return A * np.exp(-(x-mu)**2/(2.*sigma**2))
def sigmoid(x, *p):
L, x0, k, b = p
return L / (1 + np.exp(-k * (x -x0))) + b
def getFWHM(filename, output_name, motor):
motor = motor.capitalize()
df = pd.read_csv(filename, sep=',')
#df = df[ [0,1] ]
df = df.set_index( f"x-position", drop=True )
print(df)
print(df.columns)
x_label=df.index.name
y_label=df.columns[0]
x_vals = df.index.to_numpy()
y_vals = df[y_label].to_numpy()
plt.plot(x_vals, y_vals, label = 'edge_scan')
plt.ylabel('Intensity (counts)')
plt.xlabel('Motor position (mm)')
L_guess = np.max(y_vals)
b_guess = np.min(y_vals)
x0_guess = x_vals[np.argmin(np.abs(y_vals - (L_guess + b_guess) / 2))]
k_guess = 1 / (x_vals[-1] - x_vals[0])
s0 = [L_guess, x0_guess, k_guess, b_guess]
params, params_covariance = curve_fit(sigmoid, x_vals, y_vals, p0=s0)
#L_fit, x0_fit, k_fit, b_fit = params
plt.plot(x_vals, y_vals)
plt.plot(x_vals, sigmoid(x_vals, *params))
plt.show()
y_fit = sigmoid(x_vals, *params)
dydx = np.gradient(y_fit, x_vals)
if motor == 'Y':
dydx=-dydx
mean = x_vals[np.argmax(dydx)]
sigma = (np.max(x_vals) - np.min(x_vals))/4
print(mean, sigma)
A= np.max(dydx)
print(x_vals, '\\', y_vals)
print(dydx)
dydx[np.isnan(dydx)] = 0
print(dydx)
p0 = [A, mean, sigma]
parameters, covariance = curve_fit( gaus, x_vals, dydx, p0=p0 )
x0_op = parameters[1]
sigma_op = parameters[2]
print(parameters)
gauss_y = gaus(x_vals,*parameters)
FWHM_x = np.abs(2*np.sqrt(2*np.log(2))*sigma_op)
plt.plot(x_vals, dydx, label = 'derivative')
plt.plot(x_vals, gauss_y,label='Gaussian fit',color ='orange')
plt.fill_between(x_vals,gauss_y,color='orange',alpha=0.5)
plt.axvspan(x0_op+FWHM_x/2,x0_op-FWHM_x/2, color='green', alpha=0.75, lw=0, label='FWHM = {0}'.format(FWHM_x))
#print(FWHM_x)
#plt.plot(x_label, y_label, data=df)
plt.legend()
plt.show()
# find peak centre
peaks = find_peaks( gauss_y )
fwhm_peak = peak_widths( gauss_y, peaks[0], rel_height=0.5 )
fwhm_str = x_vals[int( round( fwhm_peak[2][0], 0 ))]
fwhm_end = x_vals[int( round( fwhm_peak[3][0], 0 ))]
fwhm = fwhm_str - fwhm_end
fwhm_height = gauss_y[int( round( fwhm_peak[2][0], 0 ))]
# find 1/e2 peak width
full_peak = peak_widths( gauss_y, peaks[0], rel_height=0.865 )
full_str = x_vals[int( round( full_peak[2][0], 0 ))]
full_end = x_vals[int( round( full_peak[3][0], 0 ))]
full = full_str - full_end
full_height = gauss_y[int( round( full_peak[2][0], 0 ))]
print( "FWHM = {0}".format( fwhm ) )
print(FWHM_x)
print( "1/e2 = {0}".format( full ) )
#plt.savefig(output_name+filename.split('/')[-1]+'FWHM_{0}.png'.format(FWHM_x))
return
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
"-p",
"--path",
help="path of input and output file, not currently in use",
type=str,
default="/sf/cristallina/data/p21224/res/pshell/edge_scans/"
)
parser.add_argument(
"-i",
"--input",
help="location of input file",
type=str,
default="/sf/cristallina/data/p21224/res/pshell/edge_scans/0.5_x/0.5_x"
)
parser.add_argument(
"-o",
"--output",
help="output path to save figure",
type=str,
default="/sf/cristallina/data/p21224/res/pshell/edge_scans/"
)
parser.add_argument(
"-m",
"--motor",
help="X or Y",
type=str,
default="X"
)
args = parser.parse_args()
getFWHM(args.input, args.output, args.motor)

155
edge_scan/edge_scan_slic.py Normal file
View File

@@ -0,0 +1,155 @@
#!/usr/bin/env python3
# authors M.Appleby + some code donated by J.Beale
"""
Martin needs to tidy this
# aim
Calculate FWHM of x-ray beam from an edge scan
# protocol
give run directory
To run an edge scan in slic - there may be a limit on how far and how fast the motor can move
# to execute directly in slic:
from devices.knife_edge import KnifeEdge
kn = KnifeEdge
scan = Scanner(default_acquisitions=[daq], condition=None)
scan.scan1D(kn.x, 4, 5, 0.1, 10, 'test_knife_edge_evr_only', detectors=[], channels=["SARES30-LSCP1-FNS:CH5:VAL_GET"], pvs=[])
"""
import os, glob
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy import stats
from scipy.optimize import curve_fit
from scipy.signal import peak_widths, find_peaks
from math import sqrt, pi
from scipy.signal import savgol_filter
import argparse
from sfdata import SFDataFiles, SFScanInfo
def loadData(directory):
num_of_acqs=len([name for name in os.listdir(os.path.join(directory,'data'))])
print(num_of_acqs)
diode_data = []
for acquisition in range(1,num_of_acqs+1):
with SFDataFiles(os.path.join(directory,'data','acq{:04}.BSDATA.h5'.format(acquisition))) as data:
diode = data["SARES30-LSCP1-FNS:CH5:VAL_GET"].data
diode = diode.mean()
diode_data.append(diode)
scan = SFScanInfo(os.path.join(directory,'meta','scan.json'))
motor_positions = scan.readbacks
print(motor_positions)
print(len(diode_data), len(motor_positions))
return motor_positions, diode_data
def gauss(x, *p):
A, mu, sigma = p
return A * np.exp(-(x-mu)**2/(2.*sigma**2))
#def gauss(x, C, A, x0, sigma):
# return C + A*np.exp(-(x-x0)**2/(2.*sigma**2))
def getFWHM(motor_positions, diode_data):
#print(df.columns)
x_label='x motor'
y_label='diode'
x_vals = motor_positions*1000
y_vals = [i for i in diode_data]
y_vals = savgol_filter(y_vals, 15, 3)
fig, ax1 = plt.subplots()
ax1.scatter(x_vals, y_vals, label='edge_scan', color='blue')
plt.ylabel('Intensity (counts)')
plt.xlabel('Motor position (um)')
#plt.show()
dydx = np.gradient(y_vals, x_vals)
#dydx = dydx/max(dydx)
mean = sum((x_vals)*dydx)/sum(dydx)
sigma = np.sqrt(sum(dydx*(x_vals - mean)**2)/sum(dydx))
A= 1/(sigma*2*sqrt(pi))
dydx[np.isnan(dydx)] = 0
p0 = [100, mean, 5 ]
parameters, covariance = curve_fit( gauss, x_vals, dydx, p0=p0 )
x0_op = parameters[1]
# gaussian fit
gauss_y = gauss(x_vals, *parameters)
FWHM_x = np.abs(2*np.sqrt(2*np.log(2))*parameters[2])
# find peak centre
peaks = find_peaks( gauss_y )
# find fwhm peak width
fwhm_peak = peak_widths( gauss_y, peaks[0], rel_height=0.5 )
#fwhm_str = x_vals[int( round( fwhm_peak[2][0], 0 ))]
#fwhm_end = x_vals[int( round( fwhm_peak[3][0], 0 ))]
#fwhm = fwhm_str - fwhm_end
#fwhm_height = gauss_y[int( round( fwhm_peak[2][0], 0 ))]
# find 1/e2 peak width
#full_peak = peak_widths( gauss_y, peaks[0], rel_height=0.865 )
#full_str = x_vals[int( round( full_peak[2][0], 0 ))]
#full_end = x_vals[int( round( full_peak[3][0], 0 ))]
#full = full_str - full_end
#full_height = gauss_y[int( round( full_peak[2][0], 0 ))]
#print( "FWHM = {0}".format( fwhm ) )
print(FWHM_x)
#print( "1/e2 = {0}".format( full ) )
# plot
ax2 = ax1.twinx()
ax2.plot(x_vals, dydx,label ='derivative',color='red')
ax2.plot(x_vals,gauss_y,label='Gaussian fit',color ='orange')
plt.fill_between(x_vals,gauss_y,color='orange',alpha=0.5)
plt.axvspan(x0_op+FWHM_x/2,x0_op-FWHM_x/2, color='blue', alpha=0.75, lw=0, label='FWHM = {0}'.format(FWHM_x))
#plt.hlines(fwhm_height, fwhm_str, fwhm_end, color='green', label='FWHM (find_peaks) = {0}'.format(fwhm))
#plt.hlines(full_height, full_str, full_end, color='yellow', label='1/e2 = {0}'.format(full))
fig.legend(loc="upper left")
fig.tight_layout()
plt.show()
#plt.savefig(output_name+filename.split('/')[-1]+'FWHM_{0}.png'.format(sciFWHM_x))
return
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
"-i",
"--input",
help="location of input file",
type=str,
default="/sf/cristallina/data/p21736/raw/run0002/"
)
parser.add_argument(
"-t",
"--type",
help="laser or xray",
type=str,
#default="/sf/cristallina/data/p19150/raw/run0371/"
)
args = parser.parse_args()
x_data, y_data = loadData(args.input)
getFWHM(x_data, y_data)
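
The savgol_filter(y_vals, 15, 3) call above smooths the diode trace before it is differentiated; a short stand-alone illustration of why (the noisy edge below is synthetic):

    import numpy as np
    from scipy.signal import savgol_filter

    x = np.linspace(0, 1, 101)
    noisy = np.tanh((x - 0.5) / 0.05) + np.random.normal(scale=0.05, size=x.size)
    smooth = savgol_filter(noisy, window_length=15, polyorder=3)   # same parameters as above

    # The derivative of the raw trace is dominated by noise; the smoothed one is not.
    print(np.abs(np.gradient(noisy, x)).max(), np.abs(np.gradient(smooth, x)).max())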

185
edge_scan/edge_scan_v2.py Normal file
View File

@@ -0,0 +1,185 @@
#!/usr/bin/env python3
# authors M.Appleby
"""
# aim
Calculate FWHM of x-ray beam from an edge scan
# protocol
complete edge scan
## IMPORTANT ##
- save data as .txt file
# usage
python edge_scan_v2.py -i <path to scan file> -o <output path for figure> -m <X or Y> [-l]
# output
creates a .png of fit including FWHM title
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy import stats
from scipy.optimize import curve_fit
from scipy.signal import peak_widths, find_peaks
import os
from sfdata import SFDataFiles, SFScanInfo
from math import sqrt, pi
import argparse
def loadData(directory): # for data taken by slic
num_of_acqs=len([name for name in os.listdir(os.path.join(directory,'data'))])
print(num_of_acqs)
diode_data = []
for acquisition in range(1,num_of_acqs+1):
with SFDataFiles(os.path.join(directory,'data','acq{:04}.BSDATA.h5'.format(acquisition))) as data:
diode = data["SARES30-LSCP1-FNS:CH5:VAL_GET"].data
diode = diode.mean()
diode_data.append(diode)
scan = SFScanInfo(os.path.join(directory,'meta','scan.json'))
motor_positions = scan.readbacks
print(motor_positions)
print(len(diode_data), len(motor_positions))
return motor_positions, diode_data
def gaus(x, *p):
A, mu, sigma = p
return A * np.exp(-(x-mu)**2/(2.*sigma**2))
def sigmoid(x, *p):
L, x0, k, b = p
return L / (1 + np.exp(-k * (x -x0))) + b
def getFWHM(filename, output_name, motor, laser):
motor = motor.capitalize()
df = pd.read_csv(filename, sep='\t')
if laser:
df = df.set_index( f"x-position", drop=True )
else:
df = df.set_index( f"SAR-EXPMX:MOT_F{motor}.VAL", drop=True )
#df = df.set_index( f"SAR-EXPMX:MOT_F{motor}.VAL", drop=True )
x_label=df.index.name
y_label=df.columns[0]
x_vals = df.index.to_numpy()
y_vals = df[y_label].to_numpy()
plt.plot(x_vals, y_vals, label = 'edge_scan')
plt.ylabel('Intensity (counts)')
plt.xlabel('Motor position (mm)')
L_guess = np.max(y_vals)
b_guess = np.min(y_vals)
x0_guess = x_vals[np.argmin(np.abs(y_vals - (L_guess + b_guess) / 2))]
k_guess = 1 / (x_vals[-1] - x_vals[0])
s0 = [L_guess, x0_guess, k_guess, b_guess]
params, params_covariance = curve_fit(sigmoid, x_vals, y_vals, p0=s0)
#L_fit, x0_fit, k_fit, b_fit = params
plt.plot(x_vals, y_vals)
plt.plot(x_vals, sigmoid(x_vals, *params))
plt.show()
y_fit = sigmoid(x_vals, *params)
dydx = np.gradient(y_fit, x_vals)
if motor == 'Y':
dydx=-dydx
mean = x_vals[np.argmax(dydx)]
sigma = (np.max(x_vals) - np.min(x_vals))/4
print(mean, sigma)
A= np.max(dydx)
print(x_vals, '\\', y_vals)
print(dydx)
dydx[np.isnan(dydx)] = 0
print(dydx)
p0 = [A, mean, sigma]
parameters, covariance = curve_fit( gaus, x_vals, dydx, p0=p0 )
x0_op = parameters[1]
sigma_op = parameters[2]
print(parameters)
gauss_y = gaus(x_vals,*parameters)
FWHM_x = np.abs(2*np.sqrt(2*np.log(2))*sigma_op)
plt.plot(x_vals, dydx, label = 'derivative')
plt.plot(x_vals, gauss_y,label='Gaussian fit',color ='orange')
plt.fill_between(x_vals,gauss_y,color='orange',alpha=0.5)
plt.axvspan(x0_op+FWHM_x/2,x0_op-FWHM_x/2, color='green', alpha=0.75, lw=0, label='FWHM = {0}'.format(FWHM_x))
plt.legend()
plt.show()
# find peak centre
peaks = find_peaks( gauss_y )
fwhm_peak = peak_widths( gauss_y, peaks[0], rel_height=0.5 )
fwhm_str = x_vals[int( round( fwhm_peak[2][0], 0 ))]
fwhm_end = x_vals[int( round( fwhm_peak[3][0], 0 ))]
fwhm = fwhm_str - fwhm_end
fwhm_height = gauss_y[int( round( fwhm_peak[2][0], 0 ))]
# find 1/e2 peak width
full_peak = peak_widths( gauss_y, peaks[0], rel_height=0.865 )
full_str = x_vals[int( round( full_peak[2][0], 0 ))]
full_end = x_vals[int( round( full_peak[3][0], 0 ))]
full = full_str - full_end
full_height = gauss_y[int( round( full_peak[2][0], 0 ))]
print( "FWHM = {0}".format( fwhm ) )
print(FWHM_x)
print( "1/e2 = {0}".format( full ) )
plt.savefig(output_name+filename.split('/')[-1]+'FWHM_{0}.png'.format(FWHM_x))
return
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
"-p",
"--path",
help="path of input and output file, not currently in use",
type=str,
default="/sf/cristallina/data/p21224/res/pshell/edge_scans/"
)
parser.add_argument(
"-i",
"--input",
help="location of input file",
type=str,
default="/sf/cristallina/data/p21224/res/pshell/edge_scans/0.5_x/0.5_x"
)
parser.add_argument(
"-o",
"--output",
help="output path to save figure",
type=str,
default="/sf/cristallina/data/p21224/res/pshell/edge_scans/"
)
parser.add_argument(
"-m",
"--motor",
help="X or Y",
type=str,
default="X"
)
parser.add_argument(
"-l",
"--laser",
help="True or False",
action='store_true'
)
args = parser.parse_args()
getFWHM(args.input, args.output, args.motor, args.laser)

View File

@@ -0,0 +1,19 @@
In [1]: from slic.core.acquisition.broker import restapi
...:
In [2]: address = "http://sf-daq:10003"
...:
In [3]: detector = "JF17T16V01"
...:
In [4]: current = restapi.get_dap_settings(address, detector)
...:
Out[4]: {'aggregation_max': 1, 'apply_additional_mask': 0, 'apply_aggregation': 0, 'apply_threshold': 0, 'beam_center_x': 1612.0, 'beam_center_y': 1666.0, 'beam_energy': 11000.000000000000, 'detector_distance': 0.220, 'detector_rate': 100, 'disabled_modules': [], 'do_peakfinder_analysis': 1, 'do_radial_integration': 0, 'do_spi_analysis': 0, 'double_pixels': 'mask', 'hitfinder_adc_thresh': 600.0, 'hitfinder_min_pix_count': 4, 'hitfinder_min_snr': 4.0, 'npeaks_threshold_hit': 20, 'roi_x1': [], 'roi_x2': [], 'roi_y1': [], 'roi_y2': [], 'select_only_ppicker_events': 0, 'threshold_max': 0, 'threshold_min': 0.0, 'threshold_value': 'NaN'}
In [5]: parameters = restapi.make_dap_parameters(hitfinder_adc_thresh=600, beam_energy=11000.00000, detector_distance=0.220)
In [6]: restapi.set_dap_settings(address, detector, parameters)

129
jfj/jfjoch_device.py Normal file
View File

@@ -0,0 +1,129 @@
import sys
sys.path.append('/sf/cristallina/applications/mx/jungfraujoch_openapi_python')
from datetime import datetime
from time import sleep
import string
import openapi_client
from openapi_client.models.dataset_settings import DatasetSettings
from openapi_client.rest import ApiException
ERROR_STATUS_WAITING = 504
ALLOWED_CHARS = set(
string.ascii_letters + string.digits + "_-+/"
)
def character_cleanup(s, default="_", allowed=ALLOWED_CHARS):
return "".join(i if i in allowed else default for i in s)
class JFJ:
def __init__(self, host):
configuration = openapi_client.Configuration(
host = host
)
api_client = openapi_client.ApiClient(configuration)
self.api = openapi_client.DefaultApi(api_client)
self.actually_init()
def actually_init(self):
status = self.get_daq_status()
print(f"{status=}")
if status != "Idle":
self.api.initialize_post()
self.actually_wait_till_done()
status = self.get_daq_status()
if status != "Idle":
raise RuntimeError(f"status is not Idle but {status}")
def actually_wait_till_done(self):
while True:
try:
done = self.api.wait_till_done_post()
except Exception as e:
if e.status != ERROR_STATUS_WAITING:
print(e)
raise e
else:
#TODO: use number_of_triggers_left for progress...
if done is None: # this seems to be coming instead of: status == 200
return
#sleep(0.1) #TODO
def get_daq_status(self):
return self.api.status_get().state
def get_detector_status(self):
return self.api.detector_status_get()#.state
def get_detectors(self):
return self.api.config_select_detector_get()
def get_detector_config(self):
return self.api.config_detector_get()
def acquire(self, file_prefix, **kwargs):
#print('got here')
status = self.get_daq_status()
if status != "Idle":
raise RuntimeError(f"status is not Idle but {status}")
file_prefix = character_cleanup(file_prefix)
#print('did a filename check')
dataset_settings = openapi_client.DatasetSettings(file_prefix=file_prefix, **kwargs)
#print('dataset_settings set')
self.api.start_post(dataset_settings=dataset_settings)
#print('did start_post')
#self.actually_wait_till_done()
def take_pedestal(self):
self.api.pedestal_post()
self.actually_wait_till_done()
if __name__ == "__main__":
#jfj = JFJ("http://sf-daq-2:5232")
jfj = JFJ("http://sf-daq-2:8080")
dets = jfj.get_detectors()
print(f"{dets=}")
cfg = jfj.get_detector_config()
print(f"{cfg=}")
stat = jfj.get_detector_status()
print(f"{stat=}")
#jfj.take_pedestal()
now = datetime.now().strftime("%Y%m%d-%H%M")
name = f"test_{now}"
# name = f"!@#$%"
jfj.acquire(
beam_x_pxl = 1613,
beam_y_pxl = 1666,
detector_distance_mm = 151,
incident_energy_keV = 10.9,
sample_name = name,
file_prefix = name,
ntrigger = 10
)
#["images_per_trigger", "ntrigger", "summation", "beam_x_pxl", "beam_y_pxl", "detector_distance_mm", "incident_energy_keV", "file_prefix", "images_per_file", "space_group_number", "sample_name", "fpga_output", "compression", "total_flux", "transmission", "goniometer", "header_appendix", "image_appendix", "energy_multiplier", "data_reduction_factor_serialmx", "run_number", "experiment_group", "unit_cell"]

51
read_jfjoch_bs.py Normal file
View File

@@ -0,0 +1,51 @@
#!/usr/bin/env python3
# authors M.Appleby T. Mason J Beale M. Kepla
"""
# aim
Calculate FWHM of x-ray beam from an edge scan
# protocol
complete edge scan
## IMPORTANT ##
- save data as .txt file
# usage
python convert-scan-for-pyfai.py -j <jugfrau-name> -s <path to scan file> -n <name of output file>
# output
creates a .png of fit including FWHM title
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy import stats
from scipy.optimize import curve_fit
from scipy.signal import peak_widths, find_peaks
import os
from sfdata import SFDataFiles, SFScanInfo
from math import sqrt, pi
import argparse
def loadData(directory): # for data taken by slic
num_of_acqs=len([name for name in os.listdir(os.path.join(directory,'data'))])
print(num_of_acqs)
bkg_estimate_data = []
for acquisition in range(1,num_of_acqs+1):
with SFDataFiles(os.path.join(directory,'data','acq{:04}.BSDATA.h5'.format(acquisition))) as data:
bkg_estimate = data["JF17T16V01j:bkg_estimate"].data
#diode = diode.mean()
bkg_estimate_data.append(bkg_estimate)
scan = SFScanInfo(os.path.join(directory,'meta','scan.json'))
motor_positions = scan.readbacks
print(motor_positions)
print(len(bkg_estimate_data), len(motor_positions))
return motor_positions, bkg_estimate_data
directory="path/to/dir"
loadData(directory)
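
The script stops after loading; a possible continuation (not part of the original file, the data below is synthetic) would be to average the per-acquisition bkg_estimate arrays and plot them against the scan readbacks:

    import numpy as np
    import matplotlib.pyplot as plt

    # Synthetic stand-in for loadData() output: 5 acquisitions, 100 bkg_estimate values each.
    motor_positions = np.linspace(4.0, 5.0, 5)
    bkg_estimate_data = [np.random.normal(loc=m, scale=0.1, size=100) for m in motor_positions]

    means = [b.mean() for b in bkg_estimate_data]
    plt.plot(motor_positions, means, "o-")
    plt.xlabel("motor position")
    plt.ylabel("JF17T16V01j:bkg_estimate (mean per acquisition)")
    plt.show()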

5
results.sh Executable file
View File

@@ -0,0 +1,5 @@
#!/bin/bash
db=/sf/cristallina/applications/mx/clara_tools/mxdbclient/src/
env PYTHONPATH=$db /sf/cristallina/applications/mx/conda/miniconda/envs/39clara/bin/python /sf/cristallina/applications/mx/clara/src/results.py /sf/cristallina/data/p21981/res/run0153-hewl_cover_blue_roll/run0153_acq0018_20241113_182301/run0153_acq0018_off.stream off

View File

@@ -0,0 +1,37 @@
"""notes for gui """
import epics
#import time
"""#pulsepicker high/low
SARES30-LTIM01-EVR0:RearUniv0_SNUMPD = 1
SARES30-LTIM01-EVR0:RearUniv0_SNUMPD2 = 1
SARES30-LTIM01-EVR0:RearUniv0_SOURCE = HIGH (3)
SARES30-LTIM01-EVR0:RearUniv0_SOURCE2 = LOW (4)
#Pulsepicker pulsar
SARES30-LTIM01-EVR0:RearUniv0_SNUMPD = 3
SARES30-LTIM01-EVR0:RearUniv0_SNUMPD2 = 3
SARES30-LTIM01-EVR0:RearUniv0_SOURCE = PULSER (0)
SARES30-LTIM01-EVR0:RearUniv0_SOURCE2 = PULSER (0)
"""
def start_exp():
epics.caput("SARES30-LTIM01-EVR0:RearUniv0_SNUMPD", 3)
epics.caput("SARES30-LTIM01-EVR0:RearUniv0_SNUMPD2", 3)
epics.caput("SARES30-LTIM01-EVR0:RearUniv0_SOURCE", 0)
epics.caput("SARES30-LTIM01-EVR0:RearUniv0_SOURCE2", 0)
epics.caput("SARES30-LTIM01-EVR0:RearUniv0-Ena-SP", 1)
def end_exp():
epics.caput("SARES30-LTIM01-EVR0:RearUniv0_SNUMPD", 1)
epics.caput("SARES30-LTIM01-EVR0:RearUniv0_SNUMPD2", 1)
epics.caput("SARES30-LTIM01-EVR0:RearUniv0_SOURCE", 3)
epics.caput("SARES30-LTIM01-EVR0:RearUniv0_SOURCE2", 4)
epics.caput("SARES30-LTIM01-EVR0:RearUniv0-Ena-SP", 0)
start_exp()
#time.sleep(2)
end_exp()
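
A small read-back sketch to go with the notes above (same PV names; epics.caget only reads, so it is safe to run to confirm which mode is currently active):

    import epics

    # Read back the EVR pulse-picker settings written by start_exp()/end_exp().
    for pv in (
        "SARES30-LTIM01-EVR0:RearUniv0_SNUMPD",
        "SARES30-LTIM01-EVR0:RearUniv0_SNUMPD2",
        "SARES30-LTIM01-EVR0:RearUniv0_SOURCE",
        "SARES30-LTIM01-EVR0:RearUniv0_SOURCE2",
        "SARES30-LTIM01-EVR0:RearUniv0-Ena-SP",
    ):
        print(pv, epics.caget(pv))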