diff --git a/input_files/campaignDescriptor1_LI.yaml b/input_files/campaignDescriptor1_LI.yaml
index 2b6bda6..020fddc 100644
--- a/input_files/campaignDescriptor1_LI.yaml
+++ b/input_files/campaignDescriptor1_LI.yaml
@@ -13,7 +13,7 @@
 group_id: '5505'
 experiment: 'kinetic_flowtube_study' # 'beamtime', 'smog_chamber_study'
 dataset_startdate:
 dataset_enddate:
-actris_level: '0'
+data_level: 0
 # Instrument folders containing raw data from the campaign
 instrument_datafolder:
diff --git a/input_files/campaignDescriptor2_TBR.yaml b/input_files/campaignDescriptor2_TBR.yaml
index 232793a..5b95640 100644
--- a/input_files/campaignDescriptor2_TBR.yaml
+++ b/input_files/campaignDescriptor2_TBR.yaml
@@ -13,7 +13,7 @@
 group_id: '5505'
 experiment: 'beamtime' # beamtime, smog_chamber, lab_experiment
 dataset_startdate: '2023-09-22'
 dataset_enddate: '2023-09-25'
-actris_level: '0'
+data_level: 0
 institution : "PSI"
 filename_format : "institution,experiment,contact"
diff --git a/input_files/campaignDescriptor3_NG.yaml b/input_files/campaignDescriptor3_NG.yaml
index 445b140..644e9a2 100644
--- a/input_files/campaignDescriptor3_NG.yaml
+++ b/input_files/campaignDescriptor3_NG.yaml
@@ -13,7 +13,7 @@
 group_id: '5505'
 experiment: 'smog_chamber_study' # beamtime, smog_chamber, lab_experiment
 dataset_startdate:
 dataset_enddate:
-actris_level: '0'
+data_level: 0
 # Instrument folders containing raw data from the campaign
 instrument_datafolder:
diff --git a/pipelines/data_integration.py b/pipelines/data_integration.py
index ea58eb5..cfd1876 100644
--- a/pipelines/data_integration.py
+++ b/pipelines/data_integration.py
@@ -18,15 +18,9 @@ if dimaPath not in sys.path: # Avoid duplicate entries
 import yaml
 import logging
 from datetime import datetime
-<<<<<<< HEAD
 import shutil
-
-
-=======
 # Importing chain class from itertools
 from itertools import chain
-import shutil
->>>>>>> 978101f9c2d9b210a22749b8191330c6d8a97959
 # Import DIMA modules
 try:
     from dima.src import hdf5_writer as hdf5_lib
@@ -63,7 +57,7 @@ def load_config_and_setup_logging(yaml_config_file_path, log_dir):
     # Define required keys
     required_keys = [
         'experiment', 'contact', 'input_file_directory', 'output_file_directory',
-        'instrument_datafolder', 'project', 'actris_level'
+        'instrument_datafolder', 'project', 'data_level'
     ]

     # Supported integration modes
@@ -264,7 +258,7 @@
     select_dir_keywords = config_dict['instrument_datafolder']

     # Define root folder metadata dictionary
-    root_metadata_dict = {key : config_dict[key] for key in ['project', 'experiment', 'contact', 'actris_level']}
+    root_metadata_dict = {key : config_dict[key] for key in ['project', 'experiment', 'contact', 'data_level']}

     # Get dataset start and end dates
     dataset_startdate = config_dict['dataset_startdate']