Rerun Jupyter notebooks to check their functionality after relocating them to the notebooks/ folder. openBIS-related Python scripts still need to be tested.
Two file diffs suppressed because they are too large.
@@ -1,9 +1,21 @@
-import src.openbis_lib as openbis_lib
-import hdf5_lib
-import datetime
 import os
+from nbutils import add_project_path_to_sys_path
+
+
+# Add project root to sys.path
+add_project_path_to_sys_path()
+
+import datetime
 import logging
+
+try:
+    import src.openbis_lib as openbis_lib
+    import src.hdf5_ops as hdf5_ops
+    #import pipelines.metadata_revision as metadata_revision
+    print("Imports successful!")
+except ImportError as e:
+    print(f"Import error: {e}")
 
 def main():
 
     #df_h5 = hdf5_lib.read_hdf5_as_dataframe_v2('BeamTimeMetaData.h5')
@@ -20,7 +32,8 @@ def main():
     print(sample.identifier)
     df_openbis = samples.df.copy(deep=True)
     h5_file_path = os.path.join(os.path.curdir,'input_files\\BeamTimeMetaData.h5')
-    df_h5 = hdf5_lib.read_hdf5_as_dataframe(h5_file_path)
+
+    df_h5 = hdf5_ops.read_mtable_as_dataframe(h5_file_path)
 
     # dataframe preprocessing steps
     df_h5, df_openbis = openbis_lib.align_datetime_observation_windows(df_h5, df_openbis)
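Both relocated scripts now bootstrap their import path through a nbutils helper instead of a hard-coded sys.path.append('src'). The helper itself is not visible in this commit, so the following is only a minimal sketch of what add_project_path_to_sys_path might look like, assuming the notebooks sit one level below the project root:

import os
import sys

def add_project_path_to_sys_path():
    # Hypothetical implementation: step up from the current working
    # directory (the notebooks/ folder) to the project root and make it
    # importable, so 'import src.*' resolves after the relocation.
    project_root = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
    if project_root not in sys.path:
        sys.path.append(project_root)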
File diff suppressed because one or more lines are too long
@@ -1,12 +1,22 @@
-import os, sys
-sys.path.append(os.path.abspath('src'))
-
-import src.openbis_lib as openbis_lib
-import src.hdf5_lib as hdf5_lib
-import datetime
 import os
+from nbutils import add_project_path_to_sys_path
+
+
+# Add project root to sys.path
+add_project_path_to_sys_path()
+
+import datetime
 import logging
+
+try:
+    import src.openbis_lib as openbis_lib
+    import src.hdf5_ops as hdf5_ops
+    #import pipelines.metadata_revision as metadata_revision
+    print("Imports successful!")
+except ImportError as e:
+    print(f"Import error: {e}")
+
 
 def main():
 
     #df_h5 = hdf5_lib.read_hdf5_as_dataframe_v2('BeamTimeMetaData.h5')
@@ -23,7 +33,7 @@ def main():
     print(sample.identifier)
     df_openbis = samples.df.copy(deep=True)
     h5_file_path = os.path.join(os.path.curdir,'input_files\\BeamTimeMetaData.h5')
-    df_h5 = hdf5_lib.read_mtable_as_dataframe(h5_file_path)
+    df_h5 = hdf5_ops.read_mtable_as_dataframe(h5_file_path)
 
     # dataframe preprocessing steps
     df_h5, df_openbis = openbis_lib.align_datetime_observation_windows(df_h5, df_openbis)
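The second script gets the same guarded-import preamble; its only functional change is the reader's home module, hdf5_lib.read_mtable_as_dataframe becoming hdf5_ops.read_mtable_as_dataframe. Condensed, the call sequence both scripts share looks like the sketch below (only the calls visible in the diff are assumed to exist; note that the input path hard-codes a Windows separator, so it will not resolve on POSIX systems):

import os
import src.hdf5_ops as hdf5_ops
import src.openbis_lib as openbis_lib

def preprocess(df_openbis):
    # Path exactly as written in the scripts, Windows separator included.
    h5_file_path = os.path.join(os.path.curdir, 'input_files\\BeamTimeMetaData.h5')
    # Read the measurement table into a dataframe, then align its datetime
    # observation windows with the openBIS sample table.
    df_h5 = hdf5_ops.read_mtable_as_dataframe(h5_file_path)
    return openbis_lib.align_datetime_observation_windows(df_h5, df_openbis)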
@@ -23,11 +23,29 @@
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Imports successful!\n"
+     ]
+    }
+   ],
    "source": [
-    "import src.metadata_review_lib as metadata_annotation\n",
-    "import src.hdf5_vis as hdf5_vis\n",
-    "import os"
+    "import os\n",
+    "from nbutils import add_project_path_to_sys_path\n",
+    "\n",
+    "\n",
+    "# Add project root to sys.path\n",
+    "add_project_path_to_sys_path()\n",
+    "\n",
+    "try:\n",
+    "    import src.hdf5_ops as hdf5_ops\n",
+    "    import pipelines.metadata_revision as metadata_revision\n",
+    "    print(\"Imports successful!\")\n",
+    "except ImportError as e:\n",
+    "    print(f\"Import error: {e}\")"
    ]
   },
   {
@@ -46,7 +64,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "hdf5_file_path = \"output_files/kinetic_flowtube_study_2023-06-29_LuciaI.h5\""
+    "hdf5_file_path = \"../output_files/collection_kinetic_flowtube_study_LuciaI_2022-01-31_2023-06-29/kinetic_flowtube_study_LuciaI_2023-06-29.h5\""
    ]
   },
   {
@@ -62,11 +80,19 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 4,
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "The YAML file representation output_files/collection_kinetic_flowtube_study_LuciaI_2022-01-31_2023-06-29/kinetic_flowtube_study_LuciaI_2023-06-29.json of the HDF5 file output_files/collection_kinetic_flowtube_study_LuciaI_2022-01-31_2023-06-29/kinetic_flowtube_study_LuciaI_2023-06-29.h5 was created successfully.\n"
+     ]
+    }
+   ],
    "source": [
-    "yaml_file_path = hdf5_vis.take_yml_snapshot_of_hdf5_file(hdf5_file_path)\n",
+    "yaml_file_path = hdf5_ops.serialize_metadata(hdf5_file_path,output_format='json')\n",
     "\n",
     "if os.path.exists(yaml_file_path):\n",
     "    print(f'The YAML file representation {yaml_file_path} of the HDF5 file {hdf5_file_path} was created successfully.')"
@@ -118,7 +144,7 @@
    "outputs": [],
    "source": [
     "\n",
-    "metadata_annotation.update_hdf5_file_with_review(hdf5_file_path,yaml_file_path)"
+    "metadata_revision.update_hdf5_file_with_review(hdf5_file_path,yaml_file_path)"
    ]
   },
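In the notebook, hdf5_vis.take_yml_snapshot_of_hdf5_file is replaced by hdf5_ops.serialize_metadata with output_format='json', and the review step moves from metadata_annotation (src.metadata_review_lib) to pipelines.metadata_revision. Note that the cell's print statement still describes the snapshot as a 'YAML file representation' even though the serialized output is now JSON. The round trip the notebook performs, condensed into a sketch (only the calls shown in the diff are assumed to exist):

import os
import src.hdf5_ops as hdf5_ops
import pipelines.metadata_revision as metadata_revision

hdf5_file_path = "../output_files/collection_kinetic_flowtube_study_LuciaI_2022-01-31_2023-06-29/kinetic_flowtube_study_LuciaI_2023-06-29.h5"

# Serialize the HDF5 metadata to a JSON snapshot for review ...
yaml_file_path = hdf5_ops.serialize_metadata(hdf5_file_path, output_format='json')

# ... then fold the (possibly edited) snapshot back into the HDF5 file.
if os.path.exists(yaml_file_path):
    metadata_revision.update_hdf5_file_with_review(hdf5_file_path, yaml_file_path)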