Mirror of https://gitea.psi.ch/APOG/acsmnode.git
Refactor notebooks for workflow generation
@@ -61,7 +61,11 @@
 "\n",
 "campaign_descriptor = load_project_yaml_files(project_path, \"campaignDescriptor.yaml\")\n",
 "YEAR = campaign_descriptor['year']\n",
-"STATION_ABBR = campaign_descriptor['station_abbr']"
+"STATION_ABBR = campaign_descriptor['station_abbr']\n",
+"\n",
+"workflow_fname = f'workflow_acsm_data_{STATION_ABBR}_{YEAR}'\n",
+"\n",
+"print(workflow_fname)"
 ]
 },
 {
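For readability, the updated cell reads as the following plain Python once the JSON escaping is stripped. This is a sketch: load_project_yaml_files and project_path are assumed to be defined in an earlier notebook cell that this hunk does not show.

    # Load the campaign descriptor and derive the station/year identifiers.
    campaign_descriptor = load_project_yaml_files(project_path, "campaignDescriptor.yaml")
    YEAR = campaign_descriptor['year']
    STATION_ABBR = campaign_descriptor['station_abbr']

    # New in this commit: a campaign-specific workflow name reused by the later pipeline steps.
    workflow_fname = f'workflow_acsm_data_{STATION_ABBR}_{YEAR}'
    print(workflow_fname)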
@@ -95,7 +99,9 @@
 "outputs": [],
 "source": [
 "# Uncomment and define the following variables manually to reanalize previous data collections\n",
-"#CAMPAIGN_DATA_FILE = '../data/collection_PAY_2024_2025-05-26_2025-05-26.h5'\n",
+"#CAMPAIGN_DATA_FILE = '../data/collection_PAY_2024_2025-06-05_2025-06-05.h5'\n",
+"#CAMPAIGN_DATA_FILE = '../data/collection_JFJ_2024_2025-06-06_2025-06-06.h5'\n",
+"#APPEND_DATA_DIR = '../data/collection_JFJ_2024_2025-06-06_2025-06-06'\n",
 "#APPEND_DATA_DIR = '../data/collection_PAY_2024_2025-05-26_2025-05-26'\n",
 "#CAMPAIGN_DATA_FILE = '../data/collection_PAY_2024_2025-05-21_2025-05-21.h5'\n",
 "#APPEND_DATA_DIR = '../data/collection_PAY_2024_2025-05-21_2025-05-21'"
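To reanalyze a previously generated collection, the cell appears to expect one CAMPAIGN_DATA_FILE / APPEND_DATA_DIR pair to be uncommented manually. A hypothetical example, using the JFJ paths added in this hunk:

    # Example only: select one collection and uncomment its pair of variables.
    CAMPAIGN_DATA_FILE = '../data/collection_JFJ_2024_2025-06-06_2025-06-06.h5'
    APPEND_DATA_DIR = '../data/collection_JFJ_2024_2025-06-06_2025-06-06'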
@@ -115,7 +121,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"update_datachain_params(CAMPAIGN_DATA_FILE, 'ACSM_TOFWARE/2024')"
+"update_datachain_params(CAMPAIGN_DATA_FILE, 'ACSM_TOFWARE/2024', capture_renku_metadata=True, workflow_name=workflow_fname)"
 ]
 },
 {
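The change repeats across the remaining notebook cells: each pipeline call gains the two keyword arguments capture_renku_metadata=True and workflow_name=workflow_fname. The keyword names come straight from the diff; that they register the step under the shared workflow name for Renku provenance capture is an assumption based on the commit title.

    # Before this commit
    update_datachain_params(CAMPAIGN_DATA_FILE, 'ACSM_TOFWARE/2024')

    # After: opt in to workflow metadata capture under the name built at the top of the notebook.
    update_datachain_params(CAMPAIGN_DATA_FILE, 'ACSM_TOFWARE/2024',
                            capture_renku_metadata=True, workflow_name=workflow_fname)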
@@ -140,7 +146,7 @@
 "path_to_data_file = CAMPAIGN_DATA_FILE\n",
 "path_to_calibration_file = '../pipelines/params/calibration_factors.yaml'\n",
 "\n",
-"apply_calibration_factors(path_to_data_file,path_to_calibration_file)\n"
+"apply_calibration_factors(path_to_data_file,path_to_calibration_file, capture_renku_metadata=True, workflow_name=workflow_fname)\n"
 ]
 },
 {
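As plain Python, the calibration cell now looks like this; paths and arguments are taken from the diff, while apply_calibration_factors is a project helper assumed to be imported earlier in the notebook:

    path_to_data_file = CAMPAIGN_DATA_FILE
    path_to_calibration_file = '../pipelines/params/calibration_factors.yaml'

    apply_calibration_factors(path_to_data_file, path_to_calibration_file,
                              capture_renku_metadata=True, workflow_name=workflow_fname)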
@@ -166,7 +172,8 @@
 "#command = ['python', 'pipelines/steps/compute_automated_flags.py', path_to_data_file, dataset_name, path_to_config_file]\n",
 "#status = subprocess.run(command, capture_output=True, check=True)\n",
 "#print(status.stdout.decode())\n",
-"generate_flags(path_to_data_file, 'diagnostics')\n",
+"path_to_data_file = CAMPAIGN_DATA_FILE\n",
+"generate_flags(path_to_data_file, 'diagnostics', capture_renku_metadata=True, workflow_name=workflow_fname)\n",
 "\n"
 ]
 },
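The diagnostics flagging cell follows the same pattern, and the 'cpc' and 'species' cells in the next two hunks receive the identical pair of keywords. Plain-Python view of the updated diagnostics cell (generate_flags is a project helper assumed to be imported earlier in the notebook):

    # Re-anchor on the campaign file, then generate flags for the diagnostics channel.
    path_to_data_file = CAMPAIGN_DATA_FILE
    generate_flags(path_to_data_file, 'diagnostics',
                   capture_renku_metadata=True, workflow_name=workflow_fname)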
@@ -177,7 +184,7 @@
 "outputs": [],
 "source": [
 "\n",
-"generate_flags(path_to_data_file, 'cpc')"
+"generate_flags(path_to_data_file, 'cpc', capture_renku_metadata=True, workflow_name=workflow_fname)"
 ]
 },
 {
@@ -214,7 +221,7 @@
 "#command = ['python', 'pipelines/steps/compute_automated_flags.py', path_to_data_file, dataset_name, path_to_config_file]\n",
 "#status = subprocess.run(command, capture_output=True, check=True)\n",
 "#print(status.stdout.decode())\n",
-"generate_flags(path_to_data_file, 'species')"
+"generate_flags(path_to_data_file, 'species', capture_renku_metadata=True, workflow_name=workflow_fname)"
 ]
 },
 {
@@ -249,7 +256,7 @@
 "month = \"2-3\"\n",
 "with warnings.catch_warnings():\n",
 " warnings.simplefilter('ignore')\n",
-" prepare_ebas_submission([PATH1, PATH2, PATH3], PATH4, month)\n"
+" prepare_ebas_submission([PATH1, PATH2, PATH3], PATH4, month,capture_renku_metadata=True, workflow_name=workflow_fname)\n"
 ]
 },
 {
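Plain-Python view of the updated EBAS submission cell; the PATH1..PATH4 inputs and prepare_ebas_submission are defined elsewhere in the notebook, and suppressing warnings around the call is unchanged from the previous version:

    import warnings  # standard-library import, likely already done earlier in the notebook

    month = "2-3"
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        prepare_ebas_submission([PATH1, PATH2, PATH3], PATH4, month,
                                capture_renku_metadata=True, workflow_name=workflow_fname)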
@@ -346,7 +353,7 @@
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "dash_multi_chem_env",
+"display_name": "Python 3",
 "language": "python",
 "name": "python3"
 },
@@ -360,7 +367,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.11.9"
+"version": "3.11.10"
 }
 },
 "nbformat": 4,
File diff suppressed because one or more lines are too long