diff --git a/epics-writer/epics_writer/writer.py b/epics-writer/epics_writer/writer.py
index aa7e346..f808a55 100644
--- a/epics-writer/epics_writer/writer.py
+++ b/epics-writer/epics_writer/writer.py
@@ -1,60 +1,52 @@
-# import data_api
-# import data_api.client
-import requests
 import datetime
 import time
 import logging
+import requests
+import dateutil.parser
+import pytz
+
+import data_api
+
 logger = logging.getLogger("logger")
 
 
-# This is how the notification look like
-# {
-#     'range': {
-#         'startPulseId': 100,
-#         'endPulseId': 120
-#     },
-#
-#     'parameters': {
-#         'general/created': 'test',
-#         'general/user': 'tester',
-#         'general/process': 'test_process',
-#         'general/instrument': 'mac',
-#         'output_file': '/bla/test.h5'}  # this is usually the full path
-# }
-
-def write_epics_pvs(
-        output_file, start_pulse_id, stop_pulse_id, metadata, epics_pvs):
-
-    logger.info("Dump data to hdf5 ...")
-    logger.info("Retrieve data for channels: %s" % epics_pvs)
+def write_epics_pvs(output_file, start_pulse_id, stop_pulse_id, metadata,
+                    epics_pvs, base_url=None):
+    """Fetch the given EPICS PVs for a pulse-id range and dump them to hdf5.
+
+    base_url is the data-api REST endpoint; it must be supplied by the caller.
+    """
 
     logger.info("Retrieve pulse-id / data mapping for pulse ids")
     start_date, end_date = get_pulse_id_date_mapping([start_pulse_id, stop_pulse_id])
 
-    # logger.info("Retrieving data for interval start: " + str(
-    #     start_date) + " end: " + str(end_date) + " . From " + new_base_url)
-    # data = get_data(channels, start=start_date, end=end_date,
-    #                 base_url=new_base_url)
+    logger.info("Retrieving data for interval start: %s end: %s . From %s"
+                % (start_date, end_date, base_url))
 
-    # if len(data) < 1:
-    #     logger.error("No data retrieved")
-    #     open(new_filename + "_NO_DATA", 'a').close()
-    #
-    # else:
-    #     if new_filename:
-    #         logger.info("Persist data to hdf5 file")
-    #         data_api.to_hdf5(data, new_filename, overwrite=True,
-    #                          compression=None, shuffle=False)
+    data = get_data(epics_pvs, start=start_date, end=end_date, base_url=base_url)
+
+    if len(data) < 1:
+        logger.error("No data retrieved")
+        open(output_file + "_NO_DATA", 'a').close()
+
+    else:
+        if output_file:
+            logger.info("Persist data to hdf5 file")
+            data_api.to_hdf5(data, output_file, overwrite=True,
+                             compression=None, shuffle=False)
 
 
 def get_data(channel_list, start=None, end=None, base_url=None):
+
+    logger.info("Requesting range %s to %s." % (start, end))
+    logger.info("Retrieve data for channels: %s" % channel_list)
+
     query = {"range": {"startDate": datetime.datetime.isoformat(start),
                        "endDate": datetime.datetime.isoformat(end),
                        "startExpansion": True},
              "channels": channel_list,
              "fields": ["pulseId", "globalSeconds", "globalDate", "value",
                         "eventCount"]}
-
-    logger.info(query)
+    logger.debug(query)
 
     response = requests.post(base_url + '/query', json=query)
@@ -78,11 +67,10 @@ def get_data(channel_list, start=None, end=None, base_url=None):
         raise RuntimeError("Unable to retrieve data from server: ", response)
 
     logger.info("Data retieval is successful")
-    #
-    # data = response.json()
-    #
-    # return data_api.client._build_pandas_data_frame(data,
-    #                                                 index_field="globalDate")
+
+    data = response.json()
+
+    return data_api.client._build_pandas_data_frame(data, index_field="globalDate")
 
 
 def get_pulse_id_date_mapping(pulse_ids):
@@ -122,29 +110,27 @@ def get_pulse_id_date_mapping(pulse_ids):
                     logger.info("retrieval failed")
                     if c == 0:
                         ref_date = data[0]["data"][0]["globalDate"]
-                        # ref_date = dateutil.parser.parse(ref_date)
-                        #
-                        # now_date = datetime.datetime.now()
-                        # now_date = pytz.timezone('Europe/Zurich').localize(
-                        #     now_date)
-                        #
-                        # check_date = ref_date + datetime.timedelta(
-                        #     seconds=24)  # 20 seconds should be enough
-                        # delta_date = check_date - now_date
-                        #
-                        # s = delta_date.seconds
-                        # logger.info("retry in " + str(s) + " seconds ")
-                        # if not s <= 0:
-                        #     time.sleep(s)
-                        #     continue
-                        #
-                        # raise RuntimeError('Unable to retrieve mapping')
-                        #
-                        # date = data[0]["data"][0]["globalDate"]
-                        # date = dateutil.parser.parse(date)
-                        # dates.append(date)
-                        # break
+                        ref_date = dateutil.parser.parse(ref_date)
+
+                        now_date = datetime.datetime.now()
+                        now_date = pytz.timezone('Europe/Zurich').localize(
+                            now_date)
+
+                        check_date = ref_date + datetime.timedelta(
+                            seconds=24)  # 20 seconds should be enough
+                        delta_date = check_date - now_date
+
+                        # total_seconds(): .seconds drops the day part and sign
+                        s = delta_date.total_seconds()
+                        logger.info("retry in " + str(s) + " seconds ")
+                        if not s <= 0:
+                            time.sleep(s)
+                            continue
+
+                    raise RuntimeError('Unable to retrieve mapping')
+
+                date = data[0]["data"][0]["globalDate"]
+                date = dateutil.parser.parse(date)
+                dates.append(date)
+                break
         return dates
-    except Exception:
-        raise RuntimeError('Unable to retrieve mapping')
-
-
+    except Exception as e:
+        raise RuntimeError('Unable to retrieve pulse_id date mapping') from e