Update src/hdf5_ops.py to allow for replicates after flattening directory structures.
This commit is contained in:
@ -737,9 +737,29 @@ def save_file_dict_to_hdf5(h5file, group_name, file_dict):
 try:
-    # Create group and add their attributes
-    filename = file_dict['name']
-    group = h5file[group_name].create_group(name=filename)
+
+    # Base filename to use as group name
+    base_filename = file_dict['name']
+    candidate_name = base_filename
+    replicate_index = 0
+
+    # Check for existing group and find a free name
+    parent_group = h5file.require_group(group_name)
+    while candidate_name in parent_group:
+        replicate_index += 1
+        candidate_name = f"{base_filename}_{replicate_index}"
+
+    group = h5file[group_name].create_group(name=candidate_name )
     # Add group attributes
     group.attrs.update(file_dict['attributes_dict'])
+
+    # Annotate replicate if renamed
+    if replicate_index > 0:
+        group.attrs['replicate_of'] = base_filename
+        group.attrs['replicate_info'] = (
+            f"Renamed due to existing group with same name. "
+            f"This is replicate #{replicate_index}."
+        )

     # Add datasets to the just created group
     for dataset in file_dict['datasets']:
Reference in New Issue
Block a user