Add dataset, slurm_id, and FAILED status to models
Added a dataset JSON column to ExperimentParameters and a slurm_id column to Jobs, and introduced a FAILED value in the JobStatus enum. A new update-dataset endpoint stores a run's dataset and creates a processing job once the dataset's status is "written" (job creation moved out of experiment-parameter creation), and job status updates now carry the SLURM id.
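The pieces added here chain together: marking a run's dataset as written creates a TODO job, a processing client picks it up over SSE, and the client reports back the SLURM job id. A minimal client-side sketch of that round trip, assuming the backend from the test notebook at https://127.0.0.1:8000, a /samples prefix for the update-dataset route (the router's prefix is not visible in this diff), and hypothetical sample/run ids:

```python
import requests
from datetime import datetime, timezone

BASE_URL = "https://127.0.0.1:8000"  # assumption: same host as the notebook's URLs
SAMPLE_ID = 230                      # hypothetical sample id
RUN_ID = 1                           # hypothetical experiment run id

# 1) Report a written dataset for a run. The payload mirrors the new Datasets
#    schema (filepath, status, written_at); the "/samples" prefix is an assumption.
dataset = {
    "filepath": "/data/pilatus4/run_0001.h5",  # hypothetical path
    "status": "written",                       # "written" triggers job creation
    "written_at": datetime.now(timezone.utc).isoformat(),
}
resp = requests.patch(
    f"{BASE_URL}/samples/update-dataset/{SAMPLE_ID}/{RUN_ID}",
    json=dataset,
    verify=False,  # self-signed cert, as in the test notebook
)
resp.raise_for_status()

# 2) A processing client that picked the job up from the SSE stream reports
#    its SLURM job id along with the new status (the JobsUpdate payload).
update = {"job_id": 1, "status": "submitted", "slurm_id": 76545678}
requests.post(f"{BASE_URL}/processing/jobs/update_status", json=update, verify=False)
```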
@@ -273,6 +273,7 @@ class ExperimentParameters(Base):
     id = Column(Integer, primary_key=True, index=True, autoincrement=True)
     run_number = Column(Integer, nullable=False)
     beamline_parameters = Column(JSON, nullable=True)
+    dataset = Column(JSON, nullable=True)
     sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)


@@ -311,6 +312,7 @@ class JobStatus(str, enum.Enum):
     TODO = "todo"
     SUBMITTED = "submitted"
     DONE = "done"
+    FAILED = "failed"


 class Jobs(Base):
@@ -323,3 +325,4 @@ class Jobs(Base):
     experiment_parameters = relationship(ExperimentParameters)
     created_at = Column(DateTime, server_default=func.now())
     updated_at = Column(DateTime, onupdate=func.now())
+    slurm_id = Column(Integer, nullable=True)
@@ -41,6 +41,7 @@ async def job_event_generator(db: Session):
                 experiment_parameters=experiment.beamline_parameters
                 if experiment
                 else None,
+                slurm_id=None,
             )

             job_items.append(job_item)
@@ -75,13 +76,14 @@ def update_jobs_status(payload: JobsUpdate, db: Session = Depends(get_db)):

     # Update the status
     job.status = payload.status
+    job.slurm_id = payload.slurm_id
     # Optionally update 'updated_at'
-    from sqlalchemy import func
+    from datetime import datetime

-    job.updated_at = func.now()
+    job.updated_at = datetime.now()

     db.commit()
     db.refresh(job)

     # Return the updated job's info as response
-    return JobsUpdate(job_id=job.id, status=job.status)
+    return JobsUpdate(job_id=job.id, status=job.status, slurm_id=job.slurm_id)
@@ -1,4 +1,5 @@
 from fastapi import APIRouter, HTTPException, Depends, UploadFile, File, Form
+from fastapi.encoders import jsonable_encoder
 from sqlalchemy.orm import Session
 from pathlib import Path
 from typing import List
@@ -18,6 +19,7 @@ from app.schemas import (
     ResultResponse,
     ResultCreate,
     Results as ProcessingResults,
+    Datasets,
 )
 from app.models import (
     Puck as PuckModel,
@@ -350,19 +352,60 @@ def create_experiment_parameters_for_sample(
     db.add(new_event)
     db.commit()

-    new_job = JobModel(
-        sample_id=sample_id,
-        run_id=new_exp.id,
-        experiment_parameters=new_exp, # not sure yet
-        status=JobStatus.TODO,
-    )
-    db.add(new_job)
-    db.commit()
-    db.refresh(new_job)
-
     return new_exp


+@router.patch(
+    "/update-dataset/{sample_id}/{run_id}",
+    response_model=ExperimentParametersRead,
+    operation_id="update_dataset_for_experiment_run",
+)
+def update_experiment_run_dataset(
+    sample_id: int,
+    run_id: int,
+    dataset: Datasets,
+    db: Session = Depends(get_db),
+):
+    # Find the run for this sample and run_id
+    exp = (
+        db.query(ExperimentParametersModel)
+        .filter(
+            ExperimentParametersModel.sample_id == sample_id,
+            ExperimentParametersModel.id == run_id,
+        )
+        .first()
+    )
+    if not exp:
+        raise HTTPException(
+            status_code=404,
+            detail="ExperimentParameters (run) not found for this sample",
+        )
+
+    exp.dataset = jsonable_encoder(dataset)
+    db.commit()
+    db.refresh(exp)
+
+    # Only create a job if status is "written" and job does not exist yet
+    if dataset.status == "written":
+        job_exists = (
+            db.query(JobModel)
+            .filter(JobModel.sample_id == sample_id, JobModel.run_id == run_id)
+            .first()
+        )
+        if not job_exists:
+            new_job = JobModel(
+                sample_id=sample_id,
+                run_id=run_id,
+                experiment_parameters=exp, # adjust this line as appropriate
+                status=JobStatus.TODO,
+            )
+            db.add(new_job)
+            db.commit()
+            db.refresh(new_job)
+
+    return exp
+
+
 @router.post(
     "/processing-results", response_model=ResultResponse, operation_id="create_result"
 )
@@ -898,9 +898,16 @@ class BeamlineParameters(BaseModel):
     # beamstopDiameter_mm: Optional[float] = None


+class Datasets(BaseModel):
+    filepath: str
+    status: str
+    written_at: datetime
+
+
 class ExperimentParametersBase(BaseModel):
     run_number: int
     beamline_parameters: Optional[BeamlineParameters] = None
+    dataset: Optional[Datasets] = None
     sample_id: int


@@ -915,6 +922,12 @@ class ExperimentParametersRead(ExperimentParametersBase):
         from_attributes = True


+class ExperimentParametersUpdate(BaseModel):
+    run_number: int
+    dataset: Optional[Datasets]
+    sample_id: int
+
+
 class SampleResult(BaseModel):
     sample_id: int
     sample_name: str
@@ -952,6 +965,7 @@ class JobsCreate(BaseModel):
     created_at: datetime
     updated_at: datetime
     experiment_parameters: BeamlineParameters
+    slurm_id: Optional[int] = None

     class Config:
         from_attributes = True
@@ -968,8 +982,10 @@ class JobsResponse(BaseModel):
     updated_at: Optional[datetime]
     data_collection_parameters: Optional[DataCollectionParameters] = None
     experiment_parameters: BeamlineParameters
+    slurm_id: Optional[int] = None


 class JobsUpdate(BaseModel):
     job_id: int
     status: str
+    slurm_id: int
@@ -154,8 +154,8 @@ async def lifespan(app: FastAPI):
         load_slots_data(db)
     else: # dev or test environments
         print(f"{environment.capitalize()} environment: Regenerating database.")
-        Base.metadata.drop_all(bind=engine)
-        Base.metadata.create_all(bind=engine)
+        # Base.metadata.drop_all(bind=engine)
+        # Base.metadata.create_all(bind=engine)
         # from sqlalchemy.engine import reflection
         # from app.models import ExperimentParameters # adjust the import as needed
         # inspector = reflection.Inspector.from_engine(engine)
@@ -9,7 +9,7 @@
     "is_executing": true
    },
    "ExecuteTime": {
-    "start_time": "2025-04-29T12:21:15.813326Z"
+    "start_time": "2025-04-29T20:30:28.354331Z"
    }
   },
   "source": [
@@ -20,10 +20,11 @@
    "SSE_URL = \"https://127.0.0.1:8000/processing/jobs/stream\"\n",
    "UPDATE_URL = \"https://127.0.0.1:8000/processing/jobs/update_status\"\n",
    "\n",
-   "def submit_job_update(job_id, status):\n",
+   "def submit_job_update(job_id, status, slurm_id):\n",
    "    payload = {\n",
    "        \"job_id\": job_id,\n",
-   "        \"status\": status\n",
+   "        \"status\": status,\n",
+   "        \"slurm_id\": slurm_id,\n",
    "    }\n",
    "    try:\n",
    "        response = requests.post(UPDATE_URL, json=payload, verify=False)\n",
@@ -48,11 +49,11 @@
    "            jobs = json.loads(event.data)\n",
    "            print(f\"Jobs received: {jobs}\")\n",
    "\n",
-   "            #for job in jobs:\n",
-   "            #    job_id = job.get(\"job_id\")\n",
-   "            #    print(f\"Job ID: {job_id}, Current status: {job.get('status')}\")\n",
-   "            #    # Immediately update status to \"submitted\"\n",
-   "            #    submit_job_update(job_id, \"submitted\")\n",
+   "            for job in jobs:\n",
+   "                job_id = job.get(\"job_id\")\n",
+   "                print(f\"Job ID: {job_id}, Current status: {job.get('status')}\")\n",
+   "                # Immediately update status to \"submitted\"\n",
+   "                submit_job_update(job_id, \"submitted\", 76545678)\n",
    "        except json.JSONDecodeError as e:\n",
    "            print(f\"Error decoding event data: {e}\")\n",
    "        except Exception as e:\n",
@@ -66,13 +67,18 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Starting job status updater...\n"
+      "Starting job status updater...\n",
+      "Jobs received: [{'job_id': 1, 'sample_id': 230, 'run_id': 1, 'sample_name': 'Sample230', 'status': 'todo', 'type': 'default_type', 'created_at': '2025-04-29T20:25:20.888394', 'updated_at': None, 'data_collection_parameters': None, 'experiment_parameters': {'synchrotron': 'Swiss Light Source', 'beamline': 'PXIII', 'detector': {'manufacturer': 'DECTRIS', 'model': 'PILATUS4 2M', 'type': 'photon-counting', 'serialNumber': '16684dscsd668468', 'detectorDistance_mm': 95.0, 'beamCenterX_px': 512.0, 'beamCenterY_px': 512.0, 'pixelSizeX_um': 150.0, 'pixelSizeY_um': 150.0}, 'wavelength': 1.0, 'ringCurrent_A': 0.0, 'ringMode': 'Machine Down', 'undulator': None, 'undulatorgap_mm': None, 'monochromator': 'Si111', 'transmission': 1.0, 'focusingOptic': 'Kirkpatrick-Baez', 'beamlineFluxAtSample_ph_s': 0.0, 'beamSizeWidth': 30.0, 'beamSizeHeight': 30.0, 'characterization': None, 'rotation': {'omegaStart_deg': 0.0, 'omegaStep': 0.1, 'chi': 0.0, 'phi': 10.0, 'numberOfImages': 3600, 'exposureTime_s': 0.02}, 'gridScan': None, 'jet': None, 'cryojetTemperature_K': None, 'humidifierTemperature_K': None, 'humidifierHumidity': None}, 'slurm_id': None}]\n",
+      "Job ID: 1, Current status: todo\n",
+      "✅ Job 1 status updated to 'submitted'. Response: {'job_id': 1, 'status': 'submitted', 'slurm_id': 76545678}\n"
      ]
     },
     {
      "name": "stderr",
      "output_type": "stream",
      "text": [
      "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1103: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
      " warnings.warn(\n",
      "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1103: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
      " warnings.warn(\n"
      ]
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "aareDB"
-version = "0.1.1a1"
+version = "0.1.1a2"
 description = "Backend for next gen sample management system"
 authors = [{name = "Guillaume Gotthard", email = "guillaume.gotthard@psi.ch"}]
 license = {text = "MIT"}
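Nothing in this commit sets the new FAILED status yet; the notebook only drives todo → submitted. A hedged sketch of how a worker might use it once a SLURM job reaches a terminal state, reusing the update_status endpoint from the notebook; how the SLURM state is obtained (e.g. via sacct) is assumed and not shown:

```python
import requests

# Endpoint taken from the test notebook; verify=False matches its self-signed setup.
UPDATE_URL = "https://127.0.0.1:8000/processing/jobs/update_status"

def report_final_status(job_id: int, slurm_id: int, slurm_state: str) -> None:
    """Map a terminal SLURM state onto the JobStatus enum and report it back.

    `slurm_state` is whatever your SLURM query returned; querying SLURM
    is outside this sketch.
    """
    status = "done" if slurm_state == "COMPLETED" else "failed"  # new FAILED value
    payload = {"job_id": job_id, "status": status, "slurm_id": slurm_id}
    requests.post(UPDATE_URL, json=payload, verify=False).raise_for_status()

# e.g. report_final_status(1, 76545678, "FAILED") marks job 1 as "failed"
```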