Add dataset, slurm_id, and FAILED status to models

Enhanced the models with new fields: a dataset field for ExperimentParameters and a slurm_id for Jobs. Introduced a FAILED status in the JobStatus enum. Added an update-dataset endpoint so a run's dataset can be recorded and a processing job is created once the dataset status is "written".
author GotthardG
date 2025-04-29 22:52:36 +02:00
parent 9af2e84f9e
commit 57de665c7b
8 changed files with 202 additions and 55 deletions
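For orientation, the flow this commit wires up can be sketched as follows. The base URL, route prefixes, the HTTP method of the jobs-update route, and the use of the requests library are assumptions for illustration; only the field names come from the diff below.

# Rough lifecycle sketch (paths and base URL are assumptions, not part of this commit).
import requests
from datetime import datetime, timezone

BASE = "http://localhost:8000"  # hypothetical deployment

# 1) A data writer reports that the dataset for sample 17, run 3 has been written.
dataset = {
    "filepath": "/data/sample_17/run_0003.h5",  # hypothetical path
    "status": "written",                        # "written" is what triggers job creation
    "written_at": datetime.now(timezone.utc).isoformat(),
}
requests.patch(f"{BASE}/update-dataset/17/3", json=dataset).raise_for_status()

# 2) The backend creates a Jobs row with status TODO; a processing daemon later
#    reports progress back, now including the SLURM job id.
requests.post(
    f"{BASE}/jobs/update_status",  # route name assumed, not shown in the diff
    json={"job_id": 42, "status": "submitted", "slurm_id": 123456},
).raise_for_status()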

View File

@@ -273,6 +273,7 @@ class ExperimentParameters(Base):
id = Column(Integer, primary_key=True, index=True, autoincrement=True)
run_number = Column(Integer, nullable=False)
beamline_parameters = Column(JSON, nullable=True)
+ dataset = Column(JSON, nullable=True)
sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)
@@ -311,6 +312,7 @@ class JobStatus(str, enum.Enum):
TODO = "todo"
SUBMITTED = "submitted"
DONE = "done"
+ FAILED = "failed"
class Jobs(Base):
@@ -323,3 +325,4 @@ class Jobs(Base):
experiment_parameters = relationship(ExperimentParameters)
created_at = Column(DateTime, server_default=func.now())
updated_at = Column(DateTime, onupdate=func.now())
+ slurm_id = Column(Integer, nullable=True)
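A minimal sketch of how the new column and enum member might be used at the ORM level; the helper itself is hypothetical and assumes an existing SQLAlchemy Session.

# Hypothetical helper built on the new slurm_id column and FAILED status.
from typing import Optional

from sqlalchemy.orm import Session
from app.models import Jobs, JobStatus

def mark_job_failed(db: Session, job_id: int, slurm_id: Optional[int] = None) -> None:
    """Record a failed processing job, keeping the SLURM id if one was assigned."""
    job = db.query(Jobs).filter(Jobs.id == job_id).first()
    if job is None:
        return
    job.status = JobStatus.FAILED  # new enum member
    job.slurm_id = slurm_id        # new nullable column
    db.commit()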

View File

@@ -41,6 +41,7 @@ async def job_event_generator(db: Session):
experiment_parameters=experiment.beamline_parameters
if experiment
else None,
+ slurm_id=None,
)
job_items.append(job_item)
@@ -75,13 +76,14 @@ def update_jobs_status(payload: JobsUpdate, db: Session = Depends(get_db)):
# Update the status
job.status = payload.status
+ job.slurm_id = payload.slurm_id
# Optionally update 'updated_at'
- from sqlalchemy import func
+ from datetime import datetime
- job.updated_at = func.now()
+ job.updated_at = datetime.now()
db.commit()
db.refresh(job)
# Return the updated job's info as response
- return JobsUpdate(job_id=job.id, status=job.status)
+ return JobsUpdate(job_id=job.id, status=job.status, slurm_id=job.slurm_id)
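Switching job.updated_at from func.now() to datetime.now() means the timestamp is now generated by the application process as a naive local time rather than by the database server. The slurm_id recorded here presumably comes from whatever submits the processing job to SLURM; a sketch of that side, assuming sbatch --parsable and an update route whose exact path is not shown in this diff:

# Hypothetical submitter-side helper showing where slurm_id would come from.
import subprocess
import requests

def submit_and_report(job_id: int, script: str, api_base: str = "http://localhost:8000") -> int:
    # `sbatch --parsable` prints the numeric SLURM job id (optionally followed by ";cluster").
    out = subprocess.run(
        ["sbatch", "--parsable", script], capture_output=True, text=True, check=True
    )
    slurm_id = int(out.stdout.strip().split(";")[0])
    # Report back with the extended JobsUpdate payload, which now carries slurm_id.
    requests.post(
        f"{api_base}/jobs/update_status",  # route path assumed
        json={"job_id": job_id, "status": "submitted", "slurm_id": slurm_id},
    ).raise_for_status()
    return slurm_id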

View File

@@ -1,4 +1,5 @@
from fastapi import APIRouter, HTTPException, Depends, UploadFile, File, Form
+ from fastapi.encoders import jsonable_encoder
from sqlalchemy.orm import Session
from pathlib import Path
from typing import List
@@ -18,6 +19,7 @@ from app.schemas import (
ResultResponse,
ResultCreate,
Results as ProcessingResults,
+ Datasets,
)
from app.models import (
Puck as PuckModel,
@@ -350,19 +352,60 @@ def create_experiment_parameters_for_sample(
db.add(new_event)
db.commit()
- new_job = JobModel(
- sample_id=sample_id,
- run_id=new_exp.id,
- experiment_parameters=new_exp, # not sure yet
- status=JobStatus.TODO,
- )
- db.add(new_job)
- db.commit()
- db.refresh(new_job)
return new_exp
+ @router.patch(
+ "/update-dataset/{sample_id}/{run_id}",
+ response_model=ExperimentParametersRead,
+ operation_id="update_dataset_for_experiment_run",
+ )
+ def update_experiment_run_dataset(
+ sample_id: int,
+ run_id: int,
+ dataset: Datasets,
+ db: Session = Depends(get_db),
+ ):
+ # Find the run for this sample and run_id
+ exp = (
+ db.query(ExperimentParametersModel)
+ .filter(
+ ExperimentParametersModel.sample_id == sample_id,
+ ExperimentParametersModel.id == run_id,
+ )
+ .first()
+ )
+ if not exp:
+ raise HTTPException(
+ status_code=404,
+ detail="ExperimentParameters (run) not found for this sample",
+ )
+ exp.dataset = jsonable_encoder(dataset)
+ db.commit()
+ db.refresh(exp)
+ # Only create a job if status is "written" and job does not exist yet
+ if dataset.status == "written":
+ job_exists = (
+ db.query(JobModel)
+ .filter(JobModel.sample_id == sample_id, JobModel.run_id == run_id)
+ .first()
+ )
+ if not job_exists:
+ new_job = JobModel(
+ sample_id=sample_id,
+ run_id=run_id,
+ experiment_parameters=exp, # adjust this line as appropriate
+ status=JobStatus.TODO,
+ )
+ db.add(new_job)
+ db.commit()
+ db.refresh(new_job)
+ return exp
@router.post(
"/processing-results", response_model=ResultResponse, operation_id="create_result"
)
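A quick way to exercise the new trigger is a test along these lines; the app import path, the absence of a router prefix, and the existence of sample 1 with run 1 in the test database are assumptions.

# Hypothetical test sketch for the dataset-written trigger.
from datetime import datetime, timezone

from fastapi.testclient import TestClient
from app.main import app  # assumed location of the FastAPI app

client = TestClient(app)

payload = {
    "filepath": "/data/sample_1/run_0001.h5",  # hypothetical path
    "status": "written",                       # should create a TODO job for this run
    "written_at": datetime.now(timezone.utc).isoformat(),
}
resp = client.patch("/update-dataset/1/1", json=payload)
assert resp.status_code == 200
assert resp.json()["dataset"]["status"] == "written"
# A second call with the same sample/run must not create a duplicate job,
# because the endpoint checks for an existing Jobs row first.
client.patch("/update-dataset/1/1", json=payload)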

View File

@@ -898,9 +898,16 @@ class BeamlineParameters(BaseModel):
# beamstopDiameter_mm: Optional[float] = None
+ class Datasets(BaseModel):
+ filepath: str
+ status: str
+ written_at: datetime
class ExperimentParametersBase(BaseModel):
run_number: int
beamline_parameters: Optional[BeamlineParameters] = None
+ dataset: Optional[Datasets] = None
sample_id: int
@@ -915,6 +922,12 @@ class ExperimentParametersRead(ExperimentParametersBase):
from_attributes = True
+ class ExperimentParametersUpdate(BaseModel):
+ run_number: int
+ dataset: Optional[Datasets]
+ sample_id: int
class SampleResult(BaseModel):
sample_id: int
sample_name: str
@@ -952,6 +965,7 @@ class JobsCreate(BaseModel):
created_at: datetime
updated_at: datetime
experiment_parameters: BeamlineParameters
+ slurm_id: Optional[int] = None
class Config:
from_attributes = True
@@ -968,8 +982,10 @@ class JobsResponse(BaseModel):
updated_at: Optional[datetime]
data_collection_parameters: Optional[DataCollectionParameters] = None
experiment_parameters: BeamlineParameters
+ slurm_id: Optional[int] = None
class JobsUpdate(BaseModel):
job_id: int
status: str
+ slurm_id: int
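Note that JobsUpdate.slurm_id is declared as a required int, so every status-update payload must now include it. A small sketch of constructing the new and extended schemas, assuming Pydantic v2 (which the from_attributes config suggests):

# Sketch: building the new Datasets payload and the extended JobsUpdate.
from datetime import datetime, timezone
from app.schemas import Datasets, JobsUpdate

ds = Datasets(
    filepath="/data/sample_1/run_0001.h5",  # hypothetical path
    status="written",
    written_at=datetime.now(timezone.utc),
)
upd = JobsUpdate(job_id=1, status="failed", slurm_id=987654)
print(ds.model_dump_json(), upd.model_dump_json())  # model_dump_json assumes Pydantic v2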