```
Add duplicate detection for spreadsheet data processing

Implemented logic to detect and handle duplicate 'positioninpuck' entries
within the same puck during spreadsheet processing. Updated the backend to
validate duplicates and return detailed error messages. Enhanced the
frontend to visually highlight duplicate errors and give better user
feedback during cell editing.
```
```diff
@@ -152,9 +152,7 @@ class SampleSpreadsheetImporter:
         model = []
         errors = []
         raw_data = []
         headers = []

         # Skip the first 3 rows
         rows = list(sheet.iter_rows(min_row=4, values_only=True))
         logger.debug(f"Starting to process {len(rows)} rows from the sheet")
```
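The header skip relies on openpyxl's `iter_rows` keyword arguments. A minimal standalone sketch of the same call, assuming a hypothetical `samples.xlsx` workbook:

```python
# Minimal sketch of the row iteration used above. iter_rows with min_row=4
# starts at the fourth spreadsheet row, skipping the three header rows;
# values_only=True yields plain tuples of cell values instead of Cell objects.
from openpyxl import load_workbook

wb = load_workbook("samples.xlsx", read_only=True)  # hypothetical filename
sheet = wb.active

rows = list(sheet.iter_rows(min_row=4, values_only=True))
print(f"{len(rows)} data rows found (header rows 1-3 skipped)")
```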
```diff
@@ -200,6 +198,8 @@ class SampleSpreadsheetImporter:
             "chiphiangles",
         ]

+        duplicate_check = {}
+
         for index, row in enumerate(rows):
             if not any(row):
                 logger.debug(f"Skipping empty row at index {index}")
```
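The new `duplicate_check` dict maps a `dewarname-puckname` key to the set of positions already seen in that puck. A self-contained sketch of that bookkeeping (not the importer's exact code; `setdefault` stands in for the explicit key check in the hunk further down):

```python
# Standalone sketch of the duplicate_check structure: one set of seen
# positions per "dewarname-puckname" key.
duplicate_check: dict[str, set] = {}

def is_duplicate(dewarname: str, puckname: str, position) -> bool:
    """Record the position and report whether it was already taken."""
    positions = duplicate_check.setdefault(f"{dewarname}-{puckname}", set())
    if position in positions:
        return True
    positions.add(position)
    return False

assert is_duplicate("D1", "P1", 1) is False
assert is_duplicate("D1", "P1", 1) is True   # same puck, same position
assert is_duplicate("D1", "P2", 1) is False  # same position, different puck
```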
```diff
@@ -241,13 +241,12 @@ class SampleSpreadsheetImporter:
                         corrected = True
                         defaulted_columns.append(column_name)

-                    # Update the record with cleaned value (store only the cleaned part,
-                    # not the tuple)
+                    # Update the record with cleaned value
                     record[column_name] = cleaned_value
                 except (ValueError, TypeError) as e:
                     logger.error(
-                        f"Validation error for row {index + 4}"
-                        f", column '{column_name}': {str(e)}"
+                        f"Validation error for row {index + 4},"
+                        f" column '{column_name}': {str(e)}"
                     )
                     errors.append(
                         {
```
```diff
@@ -258,6 +257,37 @@ class SampleSpreadsheetImporter:
                         }
                     )

+            # Validate duplicate 'positioninpuck' within the same puck
+            dewarname = record.get("dewarname")
+            puckname = record.get("puckname")
+            positioninpuck = record.get("positioninpuck")
+
+            if (
+                dewarname and puckname and positioninpuck is not None
+            ):  # Only check if all required fields exist
+                duplicate_key = f"{dewarname}-{puckname}"
+                if duplicate_key not in duplicate_check:
+                    duplicate_check[duplicate_key] = set()
+
+                if positioninpuck in duplicate_check[duplicate_key]:
+                    # Add error for duplicate position in the same puck
+                    logger.warning(
+                        f"Duplicate position '{positioninpuck}' found in puck"
+                        f" '{puckname}' (dewar: '{dewarname}')"
+                    )
+                    errors.append(
+                        {
+                            "row": index + 4,  # Adjust row number for 1-based indexing
+                            "column": "positioninpuck",  # The problematic column
+                            "value": positioninpuck,  # The value causing the issue
+                            "message": f"Duplicate position '{positioninpuck}'"
+                            f" found in puck '{puckname}' of dewar '{dewarname}'.",
+                        }
+                    )
+
+                else:
+                    duplicate_check[duplicate_key].add(positioninpuck)
+
             # Build metadata for the row
             raw_data.append(
                 {
```
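Because positions are only recorded in the `else` branch, every repeat after the first occurrence is flagged. A toy run of the same logic, with made-up records and the surrounding importer machinery stripped out:

```python
# Toy demonstration of the duplicate check; the records are made up and the
# loop mirrors the logic in the hunk above.
records = [
    {"dewarname": "D1", "puckname": "P1", "positioninpuck": 1},
    {"dewarname": "D1", "puckname": "P1", "positioninpuck": 2},
    {"dewarname": "D1", "puckname": "P1", "positioninpuck": 1},  # duplicate
]

duplicate_check, errors = {}, []
for index, record in enumerate(records):
    dewarname = record.get("dewarname")
    puckname = record.get("puckname")
    positioninpuck = record.get("positioninpuck")
    if dewarname and puckname and positioninpuck is not None:
        duplicate_key = f"{dewarname}-{puckname}"
        seen = duplicate_check.setdefault(duplicate_key, set())
        if positioninpuck in seen:
            errors.append({
                "row": index + 4,
                "column": "positioninpuck",
                "value": positioninpuck,
                "message": f"Duplicate position '{positioninpuck}' found in"
                           f" puck '{puckname}' of dewar '{dewarname}'.",
            })
        else:
            seen.add(positioninpuck)

print(errors)  # one error, for the third record (row 6 in spreadsheet numbering)
```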
```diff
@@ -267,9 +297,7 @@ class SampleSpreadsheetImporter:
                         defaulted_columns
                     ),  # True if any defaults were applied
                     "corrected": corrected,  # True if any value was corrected
                     # List of corrected columns (if any)
                     "corrected_columns": corrected_columns,
                     # List of defaulted columns (if any)
                     "defaulted_columns": defaulted_columns,
                 }
             )
```
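Each `raw_data` entry thus carries per-row bookkeeping like the following (values are made up, and the `"defaulted"` key name is inferred from the `bool(defaulted_columns)` expression above, which this hunk only shows in part):

```python
# Illustrative per-row metadata; only the fields visible in the hunk above
# are shown, with made-up values.
row_metadata = {
    "defaulted": True,                      # True if any defaults were applied
    "corrected": True,                      # True if any value was corrected
    "corrected_columns": ["chiphiangles"],  # columns whose values were cleaned
    "defaulted_columns": ["comments"],      # columns filled with defaults
}
```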