Refactor load_data to open and release the HDF5 file handle explicitly via HDF5DataOpsManager.

2024-10-10 16:26:40 +02:00
parent 61255f9e8c
commit 1e06bca81c

@@ -122,13 +122,17 @@ app.layout = dbc.Container([
 def load_data(filename, contents):
     data = {'data_loaded_flag': False}
     if filename and contents and filename.endswith('.h5'):
         try:
             path_to_file = data_flagging_utils.save_file(filename,contents)
+            DataOps = hdf5_ops.HDF5DataOpsManager(path_to_file)
+            DataOps.load_file_obj()
             #content_type, content_string = contents.split(',')
             #decoded = base64.b64decode(content_string)
             #file_path = io.BytesIO(decoded)
-            DataOps = hdf5_ops.HDF5DataOpsManager(path_to_file)
-            DataOps.load_dataset_metadata()
+            DataOps.extract_and_load_dataset_metadata()
             df = DataOps.dataset_metadata_df
             # TODO: allow selection of instrument folder
             instfolder = df['parent_instrument'].unique()[0]
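
For orientation, the sequence the first hunk introduces can be read on its own as follows. This is a minimal sketch, not code from the repository: it assumes only the hdf5_ops.HDF5DataOpsManager members visible in the diff (load_file_obj, extract_and_load_dataset_metadata, the dataset_metadata_df attribute, unload_file_obj) and a path to an already saved .h5 file; the helper name read_dataset_metadata is hypothetical. The second hunk below then adds the matching teardown calls inside the callback.

# Minimal sketch of the loading sequence introduced above (assumed API, as named in the diff).
import hdf5_ops  # project module referenced in the diff; the real import path may differ

def read_dataset_metadata(path_to_file):
    DataOps = hdf5_ops.HDF5DataOpsManager(path_to_file)  # wrap the uploaded .h5 file
    DataOps.load_file_obj()                              # open the underlying HDF5 file handle
    DataOps.extract_and_load_dataset_metadata()          # populate DataOps.dataset_metadata_df
    df = DataOps.dataset_metadata_df                     # metadata table, incl. 'parent_instrument'
    instfolder = df['parent_instrument'].unique()[0]     # first instrument folder (TODO in the diff: make selectable)
    DataOps.unload_file_obj()                            # release the file handle when done
    return df, instfolder
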
@@ -138,12 +142,16 @@ def load_data(filename, contents):
             data['path_to_uploaded_file'] = path_to_file
             data['instfolder'] = instfolder
+            DataOps.unload_file_obj()
             return data, fig
         except Exception as e:
+            DataOps.unload_file_obj()
             print(f"Error processing file: {e}")
             return data, dash.no_update
     return data, dash.no_update
 @app.callback(
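
The commit releases the file handle by calling unload_file_obj() on both the success path and in the except branch. An alternative, shown here only as an illustrative sketch and not as the repository's code, is to release the handle in a finally block so it runs on every exit path; data_flagging_utils, hdf5_ops, and the figure construction are assumed as in the diff, and the import paths are assumptions.

# Illustrative try/finally variant of the callback body (sketch, not the commit's code).
import dash
import hdf5_ops              # project modules as named in the diff; import paths are assumptions
import data_flagging_utils

def load_data(filename, contents):
    data = {'data_loaded_flag': False}
    if not (filename and contents and filename.endswith('.h5')):
        return data, dash.no_update
    path_to_file = data_flagging_utils.save_file(filename, contents)
    DataOps = hdf5_ops.HDF5DataOpsManager(path_to_file)
    try:
        DataOps.load_file_obj()                        # open the HDF5 file handle
        DataOps.extract_and_load_dataset_metadata()    # build dataset_metadata_df
        df = DataOps.dataset_metadata_df
        instfolder = df['parent_instrument'].unique()[0]
        fig = ...                                      # figure construction elided in the diff
        data['path_to_uploaded_file'] = path_to_file
        data['instfolder'] = instfolder
        return data, fig
    except Exception as e:
        print(f"Error processing file: {e}")
        return data, dash.no_update
    finally:
        DataOps.unload_file_obj()                      # single release point for both exit paths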