From d3b5dcc3b9f8d5beaec473475af2ae5d123ecbc6 Mon Sep 17 00:00:00 2001
From: Assmann Greta Marie
Date: Thu, 7 Nov 2024 20:23:43 +0100
Subject: [PATCH] changes to clara.py and results.py made during commissioning beamtime

---
 src/clara.py   | 105 +++++++++++++++++++------------------------------
 src/results.py |  15 +++++--
 2 files changed, 53 insertions(+), 67 deletions(-)

diff --git a/src/clara.py b/src/clara.py
index 453a20d..e1731b8 100644
--- a/src/clara.py
+++ b/src/clara.py
@@ -95,11 +95,12 @@ class CollectedH5:
         now = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
         pgroup = str(self.message["experiment_group"])
         res = "res"
-        #data_path = "data"
-
+        file_name = Path(str(self.message["user_data"]["file_prefix"]))
+        acq_number = self.message["file_number"]
+        run_number = self.message["run_number"]
+        acq_folder = f'run{run_number:04}_acq{acq_number:04}_{str(now)}'
+        out_path = pa / pgroup / res / file_name / acq_folder
-        file_name = Path(str(self.message["filename"][:-3]) + "_" + str(now))
-        out_path = pa / pgroup / res / file_name
         logger.info(f"processing folder will be created at : {out_path}")
         try:
             out_path.mkdir(parents=True, exist_ok=True)
@@ -299,7 +300,7 @@ class CollectedH5:
         f2.write("\n")
         f2.write("; Camera length (in m) and photon energy (eV) \n")
         f2.write("clen = " + str(self.message["detector_distance_m"]) + "\n")
-        f2.write("photon_energy = " + str(self.message["photon_energy_eV"]) + "\n")
+        f2.write("photon_energy = " + str(self.message["incident_energy_eV"]) + "\n")
         f2.write("flag_lessthan = " + str(self.message["underload"]) + "\n")
         f2.write("\n")
         #f2.write("adu_per_eV = 0.00008065\n")
@@ -321,8 +322,8 @@ class CollectedH5:
         # Assembling path for master file
         pgroup = str(self.message["experiment_group"])
         raw = "raw"
-        master_file = str(self.message["filename"][:-11]) + str("master.h5")
-        master_file_path = pa / pgroup / raw / master_file
+        master_file = str(self.message["filename"][:-19]) + str("master.h5")
+        master_file_path = slash / master_file
 
         f2.write(";mask_file ="+ str(master_file_path) +"\n")
         f2.write(";mask = /entry/instrument/detector/pixel_mask \n")
@@ -357,22 +358,25 @@ class CollectedH5:
         Function to generate a list file with the path of the input H5 file
         :return:None
         """
-        print('writing files')
+        print('writing list files')
         #Assemble path for raw data
         pgroup = str(self.message["experiment_group"])
         raw = "raw"
         data = "data"
         filen = str(self.message["filename"])
-        file_path = pa / pgroup / raw / filen
+        file_path = slash / filen
 
         # write to cell file in output folder
-        name = str(self.message["run_number"])
+        run_number = self.message["run_number"]
+        acq_number = self.message["file_number"]
+        name = f'run{run_number:04}_acq{acq_number:04}'
+
         #f = open(name + ".list", "w")
         #f.write(str(file_path))
         #f.close()
 
-        bsdata_name = filen.split('.')[0]+'.BSDATA.h5'
+        bsdata_name = Path(filen[:-15]+'.BSDATA.h5')
-        bsdata_path= pa/ pgroup /raw / bsdata_name
+        bsdata_path = slash / bsdata_name
         print(bsdata_path)
 
         while not bsdata_path.exists():
@@ -382,83 +386,48 @@ class CollectedH5:
             bsdata = h5py.File(bsdata_path, "r")
             #r"/sf/cristallina/data/p21630/raw/run0065-lov_movestop_normal_1/data/acq0001.BSDATA.h5"
         except Exception as e:
             print(f"didn't open bsdata due to error {e}")
             #_logger.error(f"Cannot open {data_file} (due to {e})")
-
-        #print(pulseids_JF[0])
-
+            return
+
         pulseids_BS = bsdata[f"/SAR-CVME-TIFALL6:EvtSet/pulse_id"][:]
-        #print(pulseids_BS[0])
-
         evt_set=bsdata[f"/SAR-CVME-TIFALL6:EvtSet/data"][:]
 
         jf_path= file_path
-        #print(jf_path)
+
         try:
#r"/sf/cristallina/data/p21630/raw/run0065-lov_movestop_normal_1/data/acq0001.JF17T16V01.h5" x = h5py.File(jf_path, "r") except Exception as e: print(f"didn't open JF17T16V01.h5 due to error {e}") #_logger.error(f"Cannot open {data_file} (due to {e})") return pulseids_JF = x[f"/entry/xfel/pulseID"][:] - #pulseids in JFJ joch path - #pulseids_JF = x[f"/data/{detector}/pulse_id"][:] - - - - for i, pulse_id in enumerate(pulseids_BS): - pulseids_BS[i]=pulse_id-1 n_pulse_id = len(pulseids_JF) #- maybe not needed ? - #if f"/data/{detector}/is_good_frame" in bsdata: - # is_good_frame = evt_set=bsdata[f"/SAR-CVME-TIFALL6:EvtSet/data"][:][f"/data/{detector}/is_good_frame"][:] - #else: - # is_good_frame = [1] * n_pulse_id - - #daq_recs = x[f"/data/{detector}/daq_rec"][:] - - #nGoodFrames = 0 - #nProcessedFrames = 0 - index_dark = [] index_light = [] blanks = [] for i in range(n_pulse_id): - - #if not is_good_frame[i]: - # continue - - #nGoodFrames += 1 - #nProcessedFrames += 1 - p = pulseids_JF[i] q = pulseids_BS[i] - #print(p) - #print(q) if p != q: - #print(f'acquisition = {acquisition}') - #print(f'Jungfrau pulse id {p} != BS data id {q} for image {i}') event_i = np.where(pulseids_JF == q)[0] event_i = i - #print(f'new i = {event_i}, not {i}, now calling BS data for pulse id {pulseids_BS[event_i]}') else: event_i=i events=evt_set[event_i] - #print(evt_set) - #print(e) - #print(event_i) - #print(i) - if events[216] and events[200]: + + trigger_event = int(self.message["user_data"]["trigger_event"]) + + if events[trigger_event] and events[200]: index_light.append(i) elif events[200]: index_dark.append(i) else: - #print('Should only be here because 200 is false. 200 is {0}'.format(e[200])) blanks.append(i) bsdata.close() @@ -517,7 +486,10 @@ class CollectedH5: # get dat file name without any preceding paths.. #last_pos = str(self.message["dataFileName"]).rfind("/") #data_file_name = str(self.message["filename"][: -3]) - data_file_name = str(self.message["run_number"])+'_'+trigger + + run_number = self.message["run_number"] + acq_number = self.message["file_number"] + data_file_name = f'run{run_number:04}_acq{acq_number:04}_{trigger}' # write file f = open("run_SLURM_" + trigger, "w") @@ -543,9 +515,9 @@ class CollectedH5: f.write("# Actual Indexing command for crystFEL \n") f.write( " indexamajig --peaks=" - + str(self.message["user_data"]["peaks"] + + str(self.message["user_data"]["peaks"]) + " --indexing=" - + str(self.message["user_data"]["indexing"] + + str(self.message["user_data"]["indexing"]) + " --xgandalf-fast-execution --threshold=" + str(self.message["user_data"]["threshold"]) + " --tolerance=" @@ -553,19 +525,19 @@ class CollectedH5: + " --int-radius=" + str(self.message["user_data"]["int-radius"]) + " --integration=" - + str(self.message["user_data"]["integration"] + + str(self.message["user_data"]["integration"]) + " -p " + str(self.message["run_number"]) + ".cell --min-snr=" + str(self.message["user_data"]["min-snr"]) + " --min-peaks=" + str(self.message["user_data"]["min-peaks"]) - + ' --min-pix-count=" + + " --min-pix-count=" + str(self.message["user_data"]["min-pix-count"]) + " -i " - + str(self.message["run_number"]) + "_" + trigger + + data_file_name + ".list -o " - + data_file_name + + data_file_name + ".stream -g " + str(self.message["run_number"]) + "_jf.geom" @@ -695,12 +667,13 @@ if __name__ == "__main__": elif sys.argv[1] == "z": pa = Path("/sf/cristallina/data/") + slash = Path("/") # potential message recieving: logger.info("SUBscribing to ZeroMQ PUBlisher.. 
     context = zmq.Context()
     subscriber = context.socket(zmq.SUB)
-    subscriber.connect("tcp://sf-broker-01.psi.ch:5555")
-    #subscriber.connect("tcp://sf-daqtest-01.psi.ch:5401")
+    #subscriber.connect("tcp://sf-broker-01.psi.ch:5555")
+    subscriber.connect("tcp://sf-daqtest-01.psi.ch:5401")
 
     subscriber.setsockopt_string(zmq.SUBSCRIBE, "")
@@ -742,7 +715,11 @@ if __name__ == "__main__":
             mess_inp.create_list_file()
             logger.info("list files created")
             # if we have two list files, loop with two different arguments "on" /"off":
-            for i in ["on","off"]:
+            trigger_list = ["off"]
+            if mess_dec["user_data"]["trigger_flag"]:
+                trigger_list.append("on")
+            print(trigger_list)
+            for i in trigger_list:
                 mess_inp.create_slurm_script(i)
                 mess_inp.submit_job_to_slurm(i)
 
diff --git a/src/results.py b/src/results.py
index 17a22ce..a16b0ad 100644
--- a/src/results.py
+++ b/src/results.py
@@ -284,6 +284,15 @@ def get_data_from_streamfiles():
                 # unit cell constants
                 uc_array[b] = np.asarray((parsed_stream[i]["crystals"][x]["Cell parameters"]))
                 b = b + 1
+
+    if len(indexable_frames) == 0:
+        old_message["beamShiftMeanX_pxl"] = 0
+        old_message["beamShiftMeanY_pxl"] = 0
+        old_message["beamShiftStdX_pxl"] = 0
+        old_message["beamShiftStdY_pxl"] = 0
+        old_message["unitCellIndexingMean"] = 0
+        old_message["unitCellIndexingStd"] = 0
+        return old_message
 
     # ------ DET SHIFT MEAN and STD-------
 
@@ -357,7 +366,7 @@ if __name__ == "__main__":
     logger.info("message inserted to DB")
 
     # retreive the inserted doc from the database
-    #doc = mxdb.query(_id=_id["insertID"])
-    #logger.info("doc info from DB is: ")
-    #logger.info(doc)
+    doc = mxdb.query(_id=_id["insertID"])
+    logger.info("doc info from DB is: ")
+    logger.info(doc)
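
Reviewer note (not part of the patch): a minimal sketch of the run/acquisition
naming scheme the patch introduces for the processing folder and the SLURM
input/output file names. The helper name and the example values (pgroup, file
prefix, run/acq numbers) are hypothetical; only the
run{NNNN}_acq{NNNN}_{timestamp} f-string pattern mirrors the patch.

    import datetime
    from pathlib import Path

    def build_output_path(base: Path, pgroup: str, file_prefix: str,
                          run_number: int, acq_number: int) -> Path:
        """Return <base>/<pgroup>/res/<file_prefix>/runNNNN_acqNNNN_<timestamp>."""
        now = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        acq_folder = f"run{run_number:04}_acq{acq_number:04}_{now}"
        return base / pgroup / "res" / file_prefix / acq_folder

    # Example with hypothetical values:
    # build_output_path(Path("/sf/cristallina/data"), "p21630", "lyso", 65, 1)
    # -> /sf/cristallina/data/p21630/res/lyso/run0065_acq0001_<timestamp>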
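
Reviewer note (not part of the patch): a sketch of the light/dark/blank frame
classification that create_list_file() performs after this change, with the
trigger event code made configurable via user_data["trigger_event"]. The
semantics assumed here (code 200 = frame recorded, trigger_event = pump/laser
trigger) are an interpretation, not stated in the patch.

    import numpy as np

    def classify_frames(evt_set: np.ndarray, trigger_event: int):
        """Split frame indices into (light, dark, blank) using BSDATA event codes."""
        index_light, index_dark, blanks = [], [], []
        for i, events in enumerate(evt_set):
            if events[trigger_event] and events[200]:
                index_light.append(i)   # frame recorded with the pump trigger set
            elif events[200]:
                index_dark.append(i)    # frame recorded without the pump trigger
            else:
                blanks.append(i)        # no usable frame for this pulse
        return index_light, index_dark, blanks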