Add dataset, slurm_id, and FAILED status to models
Enhanced the models with new fields: a `dataset` field on Experiment Parameters and a `slurm_id` field on Jobs. Introduced a FAILED status in the JobStatus enum. Updated the endpoints to persist datasets and to create a processing job when a dataset's status becomes "written".
This commit is contained in:
@ -41,6 +41,7 @@ async def job_event_generator(db: Session):
|
||||
experiment_parameters=experiment.beamline_parameters
|
||||
if experiment
|
||||
else None,
|
||||
slurm_id=None,
|
||||
)
|
||||
|
||||
job_items.append(job_item)
|
||||
@ -75,13 +76,14 @@ def update_jobs_status(payload: JobsUpdate, db: Session = Depends(get_db)):
|
||||
|
||||
# Update the status
|
||||
job.status = payload.status
|
||||
job.slurm_id = payload.slurm_id
|
||||
# Optionally update 'updated_at'
|
||||
from sqlalchemy import func
|
||||
from datetime import datetime
|
||||
|
||||
job.updated_at = func.now()
|
||||
job.updated_at = datetime.now()
|
||||
|
||||
db.commit()
|
||||
db.refresh(job)
|
||||
|
||||
# Return the updated job's info as response
|
||||
return JobsUpdate(job_id=job.id, status=job.status)
|
||||
return JobsUpdate(job_id=job.id, status=job.status, slurm_id=job.slurm_id)
|
||||
|
@ -1,4 +1,5 @@
|
||||
from fastapi import APIRouter, HTTPException, Depends, UploadFile, File, Form
|
||||
from fastapi.encoders import jsonable_encoder
|
||||
from sqlalchemy.orm import Session
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
@ -18,6 +19,7 @@ from app.schemas import (
|
||||
ResultResponse,
|
||||
ResultCreate,
|
||||
Results as ProcessingResults,
|
||||
Datasets,
|
||||
)
|
||||
from app.models import (
|
||||
Puck as PuckModel,
|
||||
@ -350,19 +352,60 @@ def create_experiment_parameters_for_sample(
|
||||
db.add(new_event)
|
||||
db.commit()
|
||||
|
||||
new_job = JobModel(
|
||||
sample_id=sample_id,
|
||||
run_id=new_exp.id,
|
||||
experiment_parameters=new_exp, # not sure yet
|
||||
status=JobStatus.TODO,
|
||||
)
|
||||
db.add(new_job)
|
||||
db.commit()
|
||||
db.refresh(new_job)
|
||||
|
||||
return new_exp
|
||||
|
||||
|
||||
@router.patch(
    "/update-dataset/{sample_id}/{run_id}",
    response_model=ExperimentParametersRead,
    operation_id="update_dataset_for_experiment_run",
)
def update_experiment_run_dataset(
    sample_id: int,
    run_id: int,
    dataset: Datasets,
    db: Session = Depends(get_db),
):
    """Attach a dataset to one experiment run of a sample.

    Looks up the ExperimentParameters row matching ``sample_id`` and
    ``run_id``, stores the JSON-encoded dataset on it, and — when the
    dataset reports status ``"written"`` — lazily creates a TODO job for
    that run if none exists yet.

    Raises:
        HTTPException: 404 if no run matches the given sample/run pair.
    """
    # Locate the run; guard-clause out with 404 if it is missing.
    run_record = (
        db.query(ExperimentParametersModel)
        .filter(
            ExperimentParametersModel.sample_id == sample_id,
            ExperimentParametersModel.id == run_id,
        )
        .first()
    )
    if run_record is None:
        raise HTTPException(
            status_code=404,
            detail="ExperimentParameters (run) not found for this sample",
        )

    # Persist the dataset payload on the run.
    run_record.dataset = jsonable_encoder(dataset)
    db.commit()
    db.refresh(run_record)

    # A job is only warranted once the dataset has been fully written,
    # and we must not enqueue the same run twice.
    if dataset.status == "written":
        already_queued = (
            db.query(JobModel)
            .filter(JobModel.sample_id == sample_id, JobModel.run_id == run_id)
            .first()
        )
        if already_queued is None:
            queued_job = JobModel(
                sample_id=sample_id,
                run_id=run_id,
                # NOTE(review): linking the full run object here — confirm this
                # matches the relationship expected by JobModel.
                experiment_parameters=run_record,
                status=JobStatus.TODO,
            )
            db.add(queued_job)
            db.commit()
            db.refresh(queued_job)

    return run_record
|
||||
|
||||
|
||||
@router.post(
|
||||
"/processing-results", response_model=ResultResponse, operation_id="create_result"
|
||||
)
|
||||
|
Reference in New Issue
Block a user