diff --git a/pipelines/steps/apply_calibration_factors.py b/pipelines/steps/apply_calibration_factors.py
index d3ef9cf..eca6368 100644
--- a/pipelines/steps/apply_calibration_factors.py
+++ b/pipelines/steps/apply_calibration_factors.py
@@ -210,8 +210,8 @@ def main(data_file, calibration_file):
     dataManager.extract_and_load_dataset_metadata()
     dataset_metadata_df = dataManager.dataset_metadata_df.copy()
 
-    STATION = load_project_yaml_files(projectPath,'campaignDescriptor.yaml')['station']
-    keywords = ['ACSM_TOFWARE/', f'ACSM_{STATION}_', '_timeseries.txt/data_table']
+    STATION_ABBR = load_project_yaml_files(projectPath,'campaignDescriptor.yaml')['station_abbr']
+    keywords = ['ACSM_TOFWARE/', f'ACSM_{STATION_ABBR}_', '_timeseries.txt/data_table']
     find_keyword = [all(keyword in item for keyword in keywords) for item in dataset_metadata_df['dataset_name']]
 
     if sum(find_keyword) != 1:
diff --git a/pipelines/steps/generate_flags.py b/pipelines/steps/generate_flags.py
index 655f8d4..3751601 100644
--- a/pipelines/steps/generate_flags.py
+++ b/pipelines/steps/generate_flags.py
@@ -238,14 +238,14 @@ def main(data_file, flag_type):
     dataManager.extract_and_load_dataset_metadata()
     dataset_metadata_df = dataManager.dataset_metadata_df.copy()
 
-    STATION = load_project_yaml_files(projectPath,'campaignDescriptor.yaml')['station']
+    STATION_ABBR = load_project_yaml_files(projectPath,'campaignDescriptor.yaml')['station_abbr']
 
     # Find dataset associated with diagnostic channels
     if flag_type == 'diagnostics':
-        keywords = [f'ACSM_{STATION}_','_meta.txt/data_table']
+        keywords = [f'ACSM_{STATION_ABBR}_','_meta.txt/data_table']
         find_keyword = [all(keyword in item for keyword in keywords) for item in dataset_metadata_df['dataset_name']]
 
     if flag_type == 'species':
-        keywords = [f'ACSM_{STATION}_','_timeseries.txt/data_table']
+        keywords = [f'ACSM_{STATION_ABBR}_','_timeseries.txt/data_table']
         find_keyword = [all(keyword in item for keyword in keywords) for item in dataset_metadata_df['dataset_name']]
     # Specify source dataset to be extracted from input hdf5 data file
diff --git a/pipelines/steps/prepare_ebas_submission.py b/pipelines/steps/prepare_ebas_submission.py
index c1c7277..d440cbd 100644
--- a/pipelines/steps/prepare_ebas_submission.py
+++ b/pipelines/steps/prepare_ebas_submission.py
@@ -186,7 +186,7 @@ def main(paths_to_processed_files : list, path_to_flags : str, month : int = Non
     campaignDescriptorDict = load_project_yaml_files(projectPath, 'campaignDescriptor.yaml')
 
     # Validate required fields
-    station = validate_required_field(campaignDescriptorDict, 'station')
+    STATION_ABBR = validate_required_field(campaignDescriptorDict, 'station_abbr')
     instrument_name = validate_required_field(campaignDescriptorDict, 'instrument_name')
     year = validate_required_field(campaignDescriptorDict, 'year')
 
@@ -194,8 +194,8 @@ def main(paths_to_processed_files : list, path_to_flags : str, month : int = Non
     output_dir = os.path.join(projectPath, 'data')
     os.makedirs(output_dir, exist_ok=True)
 
-    output_file1 = os.path.join(output_dir, f'{station}_{instrument_name}_{year}.txt')
-    output_file2 = os.path.join(output_dir, f'{station}_{instrument_name}_FLAGS_{year}.txt')
+    output_file1 = os.path.join(output_dir, f'{STATION_ABBR}_{instrument_name}_{year}.txt')
+    output_file2 = os.path.join(output_dir, f'{STATION_ABBR}_{instrument_name}_FLAGS_{year}.txt')
     #output_file1 = os.path.join(output_dir, f'JFJ_ACSM-017_2024_month{args.month}.txt' if args.month else 'JFJ_ACSM-017_2024.txt')
     #output_file2 = os.path.join(output_dir, f'JFJ_ACSM-017_FLAGS_2024_month{args.month}.txt' if args.month else 'JFJ_ACSM-017_FLAGS_2024.txt')
diff --git a/pipelines/steps/update_actris_header.py b/pipelines/steps/update_actris_header.py
index 0c4becb..d9ede70 100644
--- a/pipelines/steps/update_actris_header.py
+++ b/pipelines/steps/update_actris_header.py
@@ -39,7 +39,7 @@ def load_yaml(path):
     actris_metadata = {
         'originator': metadata['originator'],
         'submitter': metadata['submitter'],
-        'station': metadata['station'],
+        'station_abbr': metadata['station_abbr'],
     }
     actris_metadata['originator_name'] = metadata['originator'].get('name', '')
     actris_metadata['submitter_name'] = metadata['submitter'].get('name', '')
@@ -60,11 +60,11 @@ def main(data_descriptor_path, dry_run = None):
     metadata = load_yaml(data_descriptor_path)
     print(metadata)
 
-    station = metadata.get('station', None)
-    if not station:
+    STATION_ABBR = metadata.get('station_abbr', None)
+    if not STATION_ABBR:
         raise RuntimeError(
-            f'"station" is not defined in {data_descriptor_path}. '
-            'Make sure you specify it as station: "JFJ" or station: "PAY"'
+            f'"station_abbr" is not defined in {data_descriptor_path}. '
+            'Make sure you specify it as station_abbr: "JFJ" or station_abbr: "PAY"'
         )
 
     # Define header paths
@@ -78,11 +78,11 @@ def main(data_descriptor_path, dry_run = None):
         "PAY": "third_party/acsmProcessingSoftware/src/cfg/actris_header/PAY_ACSM_092.actris_header",
     }
 
-    if station not in header_template_map:
-        raise RuntimeError(f'Station "{station}" is not supported. Choose from: {list(header_template_map)}')
+    if STATION_ABBR not in header_template_map:
+        raise RuntimeError(f'Attribute station_abbr "{STATION_ABBR}" is not supported. Choose from: {list(header_template_map)}')
 
     #projectPath = resolve_project_path()
-    header_path = os.path.join(projectPath, header_template_map[station])
+    header_path = os.path.join(projectPath, header_template_map[STATION_ABBR])
 
     if not os.path.exists(header_path):
         raise FileNotFoundError(f"Header template not found at: {header_path}")
@@ -95,7 +95,7 @@ def main(data_descriptor_path, dry_run = None):
         out_path = "output.test.ext"
         print("[DRY RUN] Target header was not overwritten.")
     else:
-        out_path = os.path.join(projectPath, header_uptate_map[station])
+        out_path = os.path.join(projectPath, header_uptate_map[STATION_ABBR])
         print("[LIVE RUN] Target header will be updated.")
 
     print(f"Writing to: {out_path}")
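
A minimal sketch of how the renamed `station_abbr` key flows into the dataset selection above, for reference. The `select_timeseries_dataset` helper and the direct PyYAML read are illustrative assumptions; the pipeline itself goes through `load_project_yaml_files` and `validate_required_field`.

```python
# Illustrative sketch only: mirrors the renamed 'station_abbr' lookup and the
# "exactly one matching dataset" check, not the project's own helpers.
import yaml  # PyYAML

def select_timeseries_dataset(descriptor_path: str, dataset_names: list[str]) -> str:
    # Read campaignDescriptor.yaml and pull the station abbreviation, e.g. "JFJ" or "PAY".
    with open(descriptor_path) as f:
        station_abbr = yaml.safe_load(f)['station_abbr']
    # Keep only dataset names that contain every keyword.
    keywords = ['ACSM_TOFWARE/', f'ACSM_{station_abbr}_', '_timeseries.txt/data_table']
    matches = [name for name in dataset_names if all(k in name for k in keywords)]
    if len(matches) != 1:
        raise RuntimeError(f'Expected exactly one matching dataset, found {len(matches)}')
    return matches[0]
```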