Moved to notebooks/
128 notebooks/demo_create_and_visualize_hdf5_file.ipynb Normal file
@@ -0,0 +1,128 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os, sys\n",
    "sys.path.append(os.path.abspath('src'))\n",
    "\n",
    "import src.hdf5_lib as h5lib\n",
    "import src.hdf5_vis as h5vis\n",
    "\n",
    "import src.g5505_utils as utils\n",
    "\n",
    "\n",
    "# Define the input file path and output directory\n",
    "\n",
    "input_file_path = './input_files/BeamTimeMetaData.h5'\n",
    "output_dir_path = './output_files'\n",
    "if not os.path.exists(output_dir_path):\n",
    "    os.makedirs(output_dir_path)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Read the file at the input_file_path specified above into a dataframe.\n",
    "\n",
    "Since we know this file was created from Thorsten's Matlab table format, we can use h5lib.read_mtable_as_dataframe() to read it.\n",
    "\n",
    "Then we rename the 'name' column to 'filename', as this is the column name used to identify files in subsequent functions.\n",
    "Also, we augment the dataframe with a few categorical columns to be used as grouping variables when creating the HDF5 file's group hierarchy."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Read BeamTimeMetaData.h5, containing Thorsten's Matlab table\n",
    "input_data_df = h5lib.read_mtable_as_dataframe(input_file_path)\n",
    "\n",
    "# Preprocess Thorsten's input_data dataframe so that it can be used to create a new .h5 file\n",
    "# under certain grouping specifications.\n",
    "input_data_df = input_data_df.rename(columns={'name': 'filename'})\n",
    "input_data_df = utils.augment_with_filenumber(input_data_df)\n",
    "input_data_df = utils.augment_with_filetype(input_data_df)\n",
    "input_data_df = utils.split_sample_col_into_sample_and_data_quality_cols(input_data_df)\n",
    "input_data_df['lastModifiedDatestr'] = input_data_df['lastModifiedDatestr'].astype('datetime64[s]')\n",
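    "\n",
    "# (Added check, not in the original notebook) Assuming the helper functions above add the\n",
    "# 'sample', 'filetype' and 'filenumber' columns used for grouping further below, preview them:\n",
    "print(input_data_df[['filename', 'sample', 'filetype', 'filenumber']].head())\n",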
"\n",
    "input_data_df.columns\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "We now create an HDF5 file with a 3-level group hierarchy based on input_data_df and three grouping functions. Then\n",
    "we visualize the group hierarchy of the created file as a treemap."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Define grouping functions to be passed into create_hdf5_file_from_dataframe. These can also be set\n",
    "# as strings referring to categorical columns in input_data_df.\n",
    "\n",
    "test_grouping_funcs = True\n",
    "if test_grouping_funcs:\n",
    "    group_by_sample = lambda x: utils.group_by_df_column(x, 'sample')\n",
    "    group_by_type = lambda x: utils.group_by_df_column(x, 'filetype')\n",
    "    group_by_filenumber = lambda x: utils.group_by_df_column(x, 'filenumber')\n",
    "else:\n",
    "    group_by_sample = 'sample'\n",
    "    group_by_type = 'filetype'\n",
    "    group_by_filenumber = 'filenumber'\n",
    "\n",
    "output_filename = 'test.h5'\n",
    "\n",
    "ofilepath = os.path.join(output_dir_path, output_filename)\n",
    "\n",
    "h5lib.create_hdf5_file_from_dataframe(ofilepath,\n",
    "                                      input_data_df, 'top-down',\n",
    "                                      group_by_funcs = [group_by_sample, group_by_type, group_by_filenumber]\n",
    "                                     )\n",
    "\n",
    "annotation_dict = {'Campaign name': 'SLS-Campaign-2023',\n",
    "                   'Producers': 'Thorsten, Luca, Zoe',\n",
    "                   'Startdate': str(input_data_df['lastModifiedDatestr'].min()),\n",
    "                   'Enddate': str(input_data_df['lastModifiedDatestr'].max())\n",
    "                   }\n",
    "h5lib.annotate_root_dir(ofilepath, annotation_dict)\n",
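    "\n",
    "# (Optional sanity check, not in the original notebook) Assuming the h5py package is\n",
    "# available in this environment, list every group and dataset path written so far,\n",
    "# to verify the 3-level hierarchy before visualizing it.\n",
    "import h5py\n",
    "with h5py.File(ofilepath, 'r') as f:\n",
    "    f.visit(print)\n",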
"\n",
    "h5vis.display_group_hierarchy_on_a_treemap(ofilepath)\n",
    "\n",
    "print(':)')\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "test_atmos_chem_env",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}