add new geom file
src/10.cell (new file, 12 lines added)
@@ -0,0 +1,12 @@
+CrystFEL unit cell file version 1.0
+
+lattice_type = cubic
+centering = P
+
+
+a = 1.0 A
+b = 2.0 A
+c = 3.0 A
+al = 90.0 deg
+be = 90.0 deg
+ga = 90.0 deg
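
For reference, a minimal sketch of reading the key/value pairs from a CrystFEL .cell file like the one above; the helper name read_cell_file is illustrative and not part of this repository.

# Illustrative sketch (not part of the commit): parse a CrystFEL .cell file.
def read_cell_file(path):
    """Return the key = value pairs of a CrystFEL unit cell file as a dict."""
    params = {}
    with open(path) as fh:
        header = fh.readline()
        if not header.startswith("CrystFEL unit cell file"):
            raise ValueError("not a CrystFEL unit cell file")
        for line in fh:
            line = line.split(";", 1)[0].strip()  # drop comments and blanks
            if "=" in line:
                key, value = (s.strip() for s in line.split("=", 1))
                params[key] = value
    return params

# read_cell_file("src/10.cell") -> {'lattice_type': 'cubic', ..., 'a': '1.0 A'}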
src/10_jf.geom (new file, 38 lines added)
@@ -0,0 +1,38 @@
+; PSI JF8M
+
+
+; Camera length (in m) and photon energy (eV)
+clen = 0.15150000154972076
+photon_energy = 12000.0
+flag_lessthan = -32768
+
+adu_per_eV = 0.00008285
+res = 13333.3 ; 75.00000356230885 micron pixel size
+
+rigid_group_0 = 0
+rigid_group_collection_0 = 0
+
+; These lines describe the data layout for the JF native multi-event files
+dim0 = %
+dim1 = ss
+dim2 = fs
+data = /entry/data/data
+
+
+;mask_file = /sf/cristallina/data/p21981/raw/run0010-6084triggers10wait/data/acq_datmaster.h5
+;mask = /entry/instrument/detector/pixel_mask
+;mask_good = 0x0
+;mask_bad = 0xFFFFFFFF
+
+; corner_{x,y} set the position of the corner of the detector (in pixels)
+; relative to the beam
+
+0/min_fs = 0
+0/min_ss = 0
+0/max_fs = 3212
+0/max_ss = 3332
+0/corner_x = -1613.0
+0/corner_y = -1666.0
+0/fs = x
+0/ss = y
+
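
As a sanity check on the single-panel geometry above, a sketch of how the CrystFEL-style panel fields map a pixel (fs, ss) to a lab-frame position; the constants are copied from the file, the function name is illustrative.

# Illustrative sketch (not part of the commit): map pixel (fs, ss) to metres.
PIXEL_SIZE_M = 1.0 / 13333.3            # res = 13333.3 pixels per metre (~75 um)
CORNER_X, CORNER_Y = -1613.0, -1666.0   # 0/corner_x, 0/corner_y (pixels)

def pixel_to_lab(fs, ss):
    """Position of pixel (fs, ss) relative to the beam axis, in metres."""
    x_pix = CORNER_X + fs   # 0/fs = x: fast scan runs along +x
    y_pix = CORNER_Y + ss   # 0/ss = y: slow scan runs along +y
    return x_pix * PIXEL_SIZE_M, y_pix * PIXEL_SIZE_M

# The panel centre (fs ~1606, ss ~1666) comes out within a few pixels of the beam.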
src/clara.py (151 lines changed)
@@ -11,14 +11,19 @@ import subprocess as sub
 import sys
 import time
 import zmq
+import re
 from pathlib import Path

 from loguru import logger

 import receive_msg


+
+#sys.path.insert(0, os.path.expanduser('/sf/cristallina/applications/mx/jungfraujoch_openapi_python/sf_datafiles'))
+#from sfdata import SFDataFiles
+
 #define log file place:
-LOG_FILENAME = time.strftime("/sf/cristallina/data/p21981/res/clara_%Y%m.log")
+LOG_FILENAME = time.strftime("/sf/cristallina/data/p22217/res/clara_%Y%m.log")

 logger.add(LOG_FILENAME, level="INFO", rotation="100MB")
@@ -113,7 +118,7 @@ class CollectedH5:
         except Exception as e:
             logger.info("Could not cd into processing directory: {}".format(e))

-        return None
+        return out_path


     def convert_spg_num(self, sg: int):
@@ -304,8 +309,8 @@ class CollectedH5:
         f2.write("flag_lessthan = " + str(self.message["underload"]) + "\n")
         f2.write("\n")
         #f2.write("adu_per_eV = 0.00008065\n")
-        f2.write("adu_per_eV = 0.00008285\n")
-        # f2.write("adu_per_photon = 1\n")
+        #f2.write("adu_per_eV = 1.0\n")
+        f2.write("adu_per_photon = 1\n")
         f2.write("res = 13333.3 ; " + str(self.message["pixel_size_m"] *1000000) + " micron pixel size\n")
         f2.write("\n")
         f2.write("rigid_group_0 = 0 \n")
@@ -353,6 +358,36 @@ class CollectedH5:
         f2.close()
         return None

+    def copy_and_edit_geom_file_from_master(self, out_path):
+        output_file_path = os.path.join(out_path, 'JF17T16V01.geom')
+        geom_file_location = str(self.message["user_data"]["geometry_file_location"])
+        pgroup = str(self.message["experiment_group"])
+        res = "res"
+        geom_file_path = pa / pgroup / res / geom_file_location
+        #geom_file_location = '/sf/cristallina/data/p21981/work/crystfel_test/JF17T16V01_jfj.geom'
+        with open(geom_file_path, 'r') as file:
+            lines=file.read()
+        detector_distance = str(float(self.message["detector_distance_m"])-0.0002)
+        photon_energy = str(self.message["incident_energy_eV"])
+        pattern_energy = r'photon_energy\s=\s(\d+)\seV\n'
+        energy = f'photon_energy = {photon_energy} eV\n'
+        lines_new = re.sub(pattern_energy, energy, lines)
+        pattern_clen = r'clen\s=\s(\d+.\d+)\n'
+        clen = f'clen = {detector_distance}\n'
+        lines_final = re.sub(pattern_clen, clen, lines_new)
+        #clen = str(self.message["detector_distance_m"])
+        #photon_energy = str(self.message["incident_energy_eV"])
+        #lines[0] = f'photon_energy = {photon_energy} eV\n'
+        #lines[1] = f'clen = {clen}\n'
+
+        #lines[0] = 'photon_energy = 11000 eV\n'
+        #lines[1] = 'clen = 0.116\n'
+
+        with open(output_file_path, 'w') as file:
+            file.writelines(lines_final)
+
+        return output_file_path
+
     def create_list_file(self):
         """
         Function to generate a list file with the path of the input H5 file
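
The substitution logic added above can be exercised in isolation; a minimal sketch with illustrative stand-in values for the message fields:

# Illustrative sketch (not part of the commit): the two re.sub calls above.
import re

master = "photon_energy = 12000 eV\nclen = 0.15150000154972076\n"
photon_energy = "11500"                    # stand-in for incident_energy_eV
detector_distance = str(0.1200 - 0.0002)   # detector_distance_m minus 0.2 mm

text = re.sub(r'photon_energy\s=\s(\d+)\seV\n',
              f'photon_energy = {photon_energy} eV\n', master)
text = re.sub(r'clen\s=\s(\d+.\d+)\n', f'clen = {detector_distance}\n', text)
assert "photon_energy = 11500 eV" in text and "clen = 0.119" in text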
@@ -377,12 +412,14 @@ class CollectedH5:
         bsdata_name = Path(filen[:-15]+'.BSDATA.h5')

         bsdata_path = slash / bsdata_name

-        print(bsdata_path)
+        logger.info("Waiting for bs_data to exist")
         while not bsdata_path.exists():
             time.sleep(1)
-        print('survived the while loop')
+
         try:
+            time.sleep(10)
+            #bsdata = SFDataFiles(bsdata_path)
             bsdata = h5py.File(bsdata_path, "r") #r"/sf/cristallina/data/p21630/raw/run0065-lov_movestop_normal_1/data/acq0001.BSDATA.h5"
         except Exception as e:
             print(f"didn't open bsdata due to error {e}") #_logger.error(f"Cannot open {data_file} (due to {e})")
@@ -396,6 +433,7 @@ class CollectedH5:

         try:
             x = h5py.File(jf_path, "r")
+            #data = SFDataFiles(bsdata_path, jf_path)
         except Exception as e:
             print(f"didn't open JF17T16V01.h5 due to error {e}") #_logger.error(f"Cannot open {data_file} (due to {e})")
             return
@@ -406,29 +444,71 @@ class CollectedH5:
         index_dark = []
         index_light = []
         blanks = []
-        for i in range(n_pulse_id):
-
-            p = pulseids_JF[i]
-            q = pulseids_BS[i]
-            if p != q:
-                event_i = np.where(pulseids_JF == q)[0]
-                event_i = i
-            else:
-                event_i=i
-
-            events=evt_set[event_i]
+        #print(evt_set)
+        _inters, ind_JF, ind_BS = np.intersect1d(pulseids_JF, pulseids_BS, return_indices=True)
+        #evt_set = evt_set[ind_BS]
+        #print(len(evt_set))
+        #print(min(ind_BS),max(ind_BS),len(ind_BS))
+
+        for idx_JF, idx_BS in zip(ind_JF, ind_BS):
+            events = evt_set[idx_BS]

             trigger_event = int(self.message["user_data"]["trigger_event"])

-            if events[trigger_event] and events[200]:
-                index_light.append(i)
-
-            elif events[200]:
-                index_dark.append(i)
-
-            else:
-                blanks.append(i)
+            if trigger_event == 63:
+                if events[trigger_event] and events[200]:
+                    index_dark.append(idx_JF)
+
+                elif events[200]:
+                    index_light.append(idx_JF)
+
+                else:
+                    blanks.append(idx_JF)
+
+            else:
+                if events[trigger_event] and events[200]:
+                    index_light.append(idx_JF)
+
+                elif events[200]:
+                    index_dark.append(idx_JF)
+
+                else:
+                    blanks.append(idx_JF)
+
+        #for i in range(n_pulse_id):
+
+        #    p = pulseids_JF[i]
+        #    q = pulseids_BS[i]
+        #    if p != q:
+        #        #event_i = np.where(pulseids_JF == q)[0] for normal detector
+        #        event_i = np.where(pulseids_BS == p)[0][0]#[::2] #[0][0]
+        #        #event_i = i
+        #    else:
+        #        event_i=i#
+
+        #    events=evt_set[event_i]
+        #print(events)
+        #print(len(events))
+        #print(event_i)
+        #print(p,q)
+        #print(i)
+
+        #    trigger_event = int(self.message["user_data"]["trigger_event"])
+
+        #    if self.message["user_data"]["trigger_flag"]:
+        #        if events[trigger_event] and events[200]:
+        #            index_light.append(i)
+
+        #        elif events[200]:
+        #            index_dark.append(i)
+
+        #        else:
+        #            blanks.append(i)
+
         bsdata.close()
         x.close()
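
The key change above replaces the positional pulse-ID comparison with np.intersect1d, which matches pulse IDs by value across the JF and BSDATA files. A toy example of what return_indices=True provides:

# Illustrative sketch (not part of the commit): matching pulse IDs by value.
import numpy as np

pulseids_JF = np.array([100, 102, 104, 106])
pulseids_BS = np.array([101, 102, 103, 104])

_inters, ind_JF, ind_BS = np.intersect1d(pulseids_JF, pulseids_BS,
                                         return_indices=True)
# _inters = [102 104], ind_JF = [1 2], ind_BS = [1 3]
for idx_JF, idx_BS in zip(ind_JF, ind_BS):
    assert pulseids_JF[idx_JF] == pulseids_BS[idx_BS]  # same pulse, both files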
@@ -470,7 +550,7 @@ class CollectedH5:

         return None

-    def create_slurm_script(self,trigger):
+    def create_slurm_script(self,trigger, geometry_file_path):
         """
         Creates the input SLURM file with the following info:
         SLURM parameters ( CPUS , nodes, etc)
@@ -499,6 +579,7 @@ class CollectedH5:
         #f.write("#SBATCH --nodelist=sf-cn-21 \n")
         # uncomment if on RA
         # f.write("#SBATCH --partition=hour \n")
+        #f.write("#SBATCH --account=p22217 \n")
         f.write("#SBATCH --cpus-per-task=32 \n")
         # f.write("#SBATCH --output=" + LOG_FILENAME + "\n")
         # f.write("#SBATCH --open-mode=append \n")
@@ -508,6 +589,7 @@ class CollectedH5:
         f.write("module purge \n")
         f.write("module use MX unstable \n")
         f.write("module load crystfel/0.10.2-rhel8 \n")
+        #f.write("module load crystfel/0.11.1 \n")
         # f.write(
         #     "module load crystfel/0.10.1-2 xgandalf/2018.01 HDF5_bitshuffle/2018.05 HDF5_LZ4/2018.05 gcc/4.8.5 hdf5_serial/1.10.3 \n"
         # )
@@ -539,8 +621,11 @@ class CollectedH5:
             + ".list -o "
             + data_file_name
             + ".stream -g "
-            + str(self.message["run_number"])
-            + "_jf.geom"
+            + f"{geometry_file_path}"
+            #+ " --mille"
+            #+ " --mille-dir=mille-dir"
+            #+ str(self.message["run_number"])
+            #+ "_jf.geom"
             + " -j `nproc` --min-res=75 "
         )
         if self.message["user_data"]["retry"]:
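
With the change above, the -g argument points at the per-run geometry file returned by copy_and_edit_geom_file_from_master instead of "<run>_jf.geom". A sketch of the resulting command string, assuming the prefix elided from this hunk invokes indexamajig:

# Illustrative sketch (not part of the commit): the assembled command line.
data_file_name = "run0010"                    # stand-in
geometry_file_path = "/out/JF17T16V01.geom"   # returned by the new method

cmd = ("indexamajig -i "                      # assumed elided prefix
       + data_file_name + ".list -o "
       + data_file_name + ".stream -g "
       + f"{geometry_file_path}"
       + " -j `nproc` --min-res=75 ")
# indexamajig -i run0010.list -o run0010.stream -g /out/JF17T16V01.geom -j `nproc` --min-res=75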
@@ -563,9 +648,9 @@ class CollectedH5:
             #+ "python /sls/MX/applications/git/vdp/src/results.py "
             + data_file_name
             + ".stream "
-            # + data_file_name
-            # + ".log "
+            + trigger
+
         )

         f.close()

@@ -586,6 +671,7 @@ class CollectedH5:

         try:
             slurm_out = sub.run(["sbatch", "run_SLURM_" + trigger ], capture_output=True)
+            time.sleep(1)
             txt = slurm_out.stdout.decode().split()
             # grep the slurm number
             logger.info(f"submitted batch job number: {txt[-1]}")
@@ -706,11 +792,12 @@ if __name__ == "__main__":
         mess_dec = json.loads(message.decode("utf-8"))
         logger.info(f"Received message is: {mess_dec}")
         mess_inp = CollectedH5(mess_dec)
-        mess_inp.mk_cd_output_dir_bl()
+        out_path = mess_inp.mk_cd_output_dir_bl()
         logger.info("output folder created")
         mess_inp.create_cell_file()
         logger.info("cell file created")
         mess_inp.create_geom_from_master()
+        geometry_file_path = mess_inp.copy_and_edit_geom_file_from_master(out_path)
         logger.info("geom file created")
         mess_inp.create_list_file()
         logger.info("list files created")
@@ -720,7 +807,7 @@ if __name__ == "__main__":
             trigger_list.append("on")
         print(trigger_list)
         for i in trigger_list:
-            mess_inp.create_slurm_script(i)
+            mess_inp.create_slurm_script(i, geometry_file_path)
             mess_inp.submit_job_to_slurm(i)

(second changed file; filename not shown in this view)
@@ -14,7 +14,7 @@ import MxdbVdpTools
 # import matplotlib.pyplot as plt

 #LOG_FILENAME = time.strftime("/sf/cristallina/applications/mx/clara_tools/log/clara_%Y%m.log")
-LOG_FILENAME = time.strftime("/sf/cristallina/data/p21981/res/clara_%Y%m.log")
+LOG_FILENAME = time.strftime("/sf/cristallina/data/p22226/res/clara_%Y%m.log")
 logger.add(LOG_FILENAME, level="INFO", rotation="100MB")

@@ -152,8 +152,8 @@ def stream_to_dictionary(streamfile):
             ln += 1

             # loop over the next 5 lines to get the diffraction resolution
-            line = loop_over_next_n_lines(text_file, 5).split()
-            ln += 5
+            line = loop_over_next_n_lines(text_file, 5).split() # 5 for crystfel 10 # 7 for crystfel 11
+            ln += 7

             # multiple lattices not done yet
             if line[0] == "predict_refine/det_shift":
@@ -161,9 +161,10 @@ def stream_to_dictionary(streamfile):
                 tmp_crystal["det_shift_y"] = line[6]
                 line = loop_over_next_n_lines(text_file, 1).split()
                 ln += 1

-                tmp_crystal["diffraction_resolution_limit [nm^-1]"] = float(line[2])
-                tmp_crystal["diffraction_resolution_limit [A]"] = float(line[5])
+                tmp_crystal["diffraction_resolution_limit_nm_inverse"] = float(line[2])
+                #print(line)
+                tmp_crystal["diffraction_resolution_limit_angstrom"] = float(line[5]) #[5] for crystfel 0.11.1, [5] for 0.10.2-rhel8

                 # get the number of predicted reflections
                 num_reflections = int(text_file.readline().split()[-1])
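
For context, the stream line the parser splits here has the form below in a CrystFEL stream, which is why indices 2 and 5 hold the two resolution values:

# Illustrative sketch (not part of the commit): the line being parsed.
line = "diffraction_resolution_limit = 2.13 nm^-1 or 4.70 A".split()
nm_inverse = float(line[2])   # -> diffraction_resolution_limit_nm_inverse
angstrom = float(line[5])     # -> diffraction_resolution_limit_angstrom
assert (nm_inverse, angstrom) == (2.13, 4.7)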
@@ -243,7 +244,10 @@ def get_data_from_streamfiles():
     # parse stream into dict
     #TODO get on or off from streamfile naming and put it in output message
     parsed_stream = stream_to_dictionary(sys.argv[1])
+    old_message["trigger_status"]= sys.argv[2]
     old_message["numberOfImages"] = len(parsed_stream)

+    old_message["user_tag"] = old_message["user_data"]["file_prefix"].split('-')[1]
+
     # get number of indexable frames- not accounted for multilattice
     # print(parsed_stream[0].keys())
@@ -255,10 +259,13 @@ def get_data_from_streamfiles():
     spots_per_frame = np.zeros((len(parsed_stream)))
     indexable_lattices = 0
     for i in range(0, len(parsed_stream)):
-        # get spots per indexable frames:
-        spots_per_frame[i] = parsed_stream[i]["num_peaks"]
-        # get total number of indexable lattices
-        indexable_lattices = indexable_lattices + parsed_stream[i]["multiple_lattices"]
+        try:
+            # get spots per indexable frames:
+            spots_per_frame[i] = parsed_stream[i]["num_peaks"]
+            # get total number of indexable lattices
+            indexable_lattices = indexable_lattices + parsed_stream[i]["multiple_lattices"]
+        except:
+            logger.warning(f"Error: missing image from stream file: {i}")

     # put into dict for results, convert to list to be json serializable
     old_message["numberOfSpotsPerImage"] = (spots_per_frame.astype(int)).tolist()
@@ -267,24 +274,32 @@ def get_data_from_streamfiles():
     # get number of indexed lattices per pattern
     lattices_per_indx_frame = {}
     for i in indexable_frames:
         lattices_per_indx_frame[int(i)] = parsed_stream[i]["multiple_lattices"]

     old_message["numberOfLatticesPerImage"] = lattices_per_indx_frame

     # mean beam center shift X and Y
     list_x = []
     list_y = []
+    resolution_limit_list = []
+    reflections_list = []
     # define np.array
     uc_array = np.zeros((indexable_lattices, 6))
     b = 0
     for i in indexable_frames:
         for x in range(0, len(parsed_stream[i]["crystals"])):
             # det shift in x and y
-            list_x.append((parsed_stream[i]["crystals"][x]["det_shift_x"]))
-            list_y.append((parsed_stream[i]["crystals"][x]["det_shift_y"]))
-            # unit cell constants
-            uc_array[b] = np.asarray((parsed_stream[i]["crystals"][x]["Cell parameters"]))
-            b = b + 1
+            try:
+                list_x.append((parsed_stream[i]["crystals"][x]["det_shift_x"]))
+                list_y.append((parsed_stream[i]["crystals"][x]["det_shift_y"]))
+                # unit cell constants
+                uc_array[b] = np.asarray((parsed_stream[i]["crystals"][x]["Cell parameters"]))
+                resolution_limit_list.append((parsed_stream[i]["crystals"][x]["diffraction_resolution_limit_angstrom"]))
+                reflections_list.append((parsed_stream[i]["crystals"][x]["num_predicted_reflections"]))
+                b = b + 1
+            except:
+                logger.warning(f"Error: missing image from stream file: {i}")

     if len(indexable_frames) == 0:
         old_message["beamShiftMeanX_pxl"] = 0
         old_message["beamShiftMeanY_pxl"] = 0
@@ -292,6 +307,10 @@ def get_data_from_streamfiles():
         old_message["beamShiftStdY_pxl"] = 0
         old_message["unitCellIndexingMean"] = 0
         old_message["unitCellIndexingStd"] = 0
+        old_message["resolutionLimitMean"] = 0
+        old_message["resolutionLimitStd"] = 0
+        old_message["numberReflectionsMean"] = 0
+        old_message["numberReflectionsStd"] = 0
         return old_message

     # ------ DET SHIFT MEAN and STD-------
@@ -304,6 +323,7 @@ def get_data_from_streamfiles():
     std_x = np.around(np.std(np.asarray(list_x).astype(float)), 4)
     mean_y = np.around(np.mean(np.asarray(list_y).astype(float)), 4)
     std_y = np.around(np.std(np.asarray(list_y).astype(float)), 4)
+
     # convert to pixel unit
     # 0.075 mm = 1 pixel =75 um
     # print(mean_x, mean_x * (1/0.075), std_x, std_x * (1/0.075), "x")
@@ -320,7 +340,21 @@ def get_data_from_streamfiles():
     #convert to list to be json serializable
     old_message["unitCellIndexingMean"] = (np.around(mean_uc, 3)).tolist()
     old_message["unitCellIndexingStd"] = (np.around(std_uc, 3)).tolist()

+    #calculate diffraction resolution limit
+    mean_res = np.around(np.mean(np.asarray(resolution_limit_list).astype(float)), 4)
+    std_res = np.around(np.std(np.asarray(resolution_limit_list).astype(float)), 4)
+    old_message["resolutionLimitMean"] = mean_res
+    old_message["resolutionLimitStd"] = std_res
+
+    #number of predicted reflections
+    mean_refs = np.around(np.mean(np.asarray(reflections_list).astype(float)), 4)
+    std_refs = np.around(np.std(np.asarray(reflections_list).astype(float)), 4)
+    old_message["numberReflectionsMean"] = mean_refs
+    old_message["numberReflectionsStd"] = std_refs
+
+    #number of spots/peaks per image
+
     # print(old_message)
     return old_message
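
The new statistics follow the same pattern as the existing unit-cell ones; a toy run with illustrative values:

# Illustrative sketch (not part of the commit): the resolution statistics.
import numpy as np

resolution_limit_list = ["2.1", "1.9", "2.4"]   # angstrom, collected as strings
mean_res = np.around(np.mean(np.asarray(resolution_limit_list).astype(float)), 4)
std_res = np.around(np.std(np.asarray(resolution_limit_list).astype(float)), 4)
print({"resolutionLimitMean": float(mean_res), "resolutionLimitStd": float(std_res)})
# {'resolutionLimitMean': 2.1333, 'resolutionLimitStd': 0.2055}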