Refactor AareDB backend and update schemas and paths.
Revised backend schema definitions, removing unnecessary attributes and adding new configurations. Updated file path references to align with the aaredb structure. Cleaned up redundant notebook content and commented out unused database regeneration logic in the backend. Added an endpoint for posting processing results to the database.
parent db6164ac3f
commit 5a0047b6d5
@@ -278,10 +278,14 @@ class Results(Base):
    __tablename__ = "results"

    id = Column(Integer, primary_key=True, index=True, autoincrement=True)
    # pgroup = Column(String(255), nullable=False)
    result = Column(JSON, nullable=True)
    result_id = Column(Integer, ForeignKey("experiment_parameters.id"), nullable=False)
    result = Column(JSON, nullable=False)  # store the full result object as JSON
    sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)
    run_id = Column(Integer, ForeignKey("experiment_parameters.id"), nullable=False)

    # optional relationships if you wish to query easily
    # sample = relationship("SampleModel", backref="results")
    # experiment_parameters = relationship("ExperimentParametersModel",
    #                                      backref="results")


    # method = Column(String(255), nullable=False)
@@ -14,8 +14,9 @@ from app.schemas import (
    SampleResult,
    ExperimentParametersCreate,
    ExperimentParametersRead,
    # ResultResponse,
    # ResultCreate,
    ImageInfo,
    ResultResponse,
    ResultCreate,
)
from app.models import (
    Puck as PuckModel,
@@ -25,7 +26,7 @@ from app.models import (
    Dewar as DewarModel,
    ExperimentParameters as ExperimentParametersModel,
    # ExperimentParameters,
    # Results,
    Results as ResultsModel,
)
from app.dependencies import get_db
import logging
@@ -246,8 +247,13 @@ async def get_sample_results(active_pgroup: str, db: Session = Depends(get_db)):

    results = []
    for sample in samples:
        # Query images associated with the sample.
        images = db.query(ImageModel).filter(ImageModel.sample_id == sample.id).all()
        # Query images associated with the sample, including the related event_type
        images = (
            db.query(ImageModel)
            .options(joinedload(ImageModel.sample_event))
            .filter(ImageModel.sample_id == sample.id)
            .all()
        )

        # Query experiment parameters (which include beamline parameters) for the
        # sample.
@@ -259,27 +265,34 @@ async def get_sample_results(active_pgroup: str, db: Session = Depends(get_db)):
        print("Experiment Parameters for sample", sample.id, experiment_parameters)

        results.append(
            {
                "sample_id": sample.id,
                "sample_name": sample.sample_name,
                "puck_name": sample.puck.puck_name if sample.puck else None,
                "dewar_name": sample.puck.dewar.dewar_name
            SampleResult(
                sample_id=sample.id,
                sample_name=sample.sample_name,
                puck_name=sample.puck.puck_name if sample.puck else None,
                dewar_name=sample.puck.dewar.dewar_name
                if (sample.puck and sample.puck.dewar)
                else None,
                "images": [
                    {"id": img.id, "filepath": img.filepath, "comment": img.comment}
                images=[
                    ImageInfo(
                        id=img.id,
                        filepath=img.filepath,
                        event_type=img.sample_event.event_type
                        if img.sample_event
                        else "Unknown",
                        comment=img.comment,
                    )
                    for img in images
                ],
                "experiment_runs": [
                    {
                        "id": ex.id,
                        "run_number": ex.run_number,
                        "beamline_parameters": ex.beamline_parameters,
                        "sample_id": ex.sample_id,
                    }
                experiment_runs=[
                    ExperimentParametersRead(
                        id=ex.id,
                        run_number=ex.run_number,
                        beamline_parameters=ex.beamline_parameters,
                        sample_id=ex.sample_id,
                    )
                    for ex in experiment_parameters
                ],
            }
            )
        )

    return results
@@ -318,32 +331,49 @@ def create_experiment_parameters_for_sample(
    db.commit()
    db.refresh(new_exp)

    # Create a "Collecting" sample event associated with the new experiment parameters
    new_event = SampleEventModel(
        sample_id=sample_id,
        event_type="Collecting",  # The event type
        timestamp=datetime.now(),  # Use current timestamp
    )
    db.add(new_event)
    db.commit()

    return new_exp


# @router.post("/results", response_model=ResultResponse)
# def create_result(result: ResultCreate, db: Session = Depends(get_db)):
#     # Validate sample_id and result_id (optional but recommended)
#     sample = db.query(SampleModel).filter_by(id=result.sample_id).first()
#     if not sample:
#         raise HTTPException(status_code=404, detail="Sample not found")
#
#     experiment = db.query(ExperimentParameters).filter_by(id=result.result_id).first()
#     if not experiment:
#         raise HTTPException(status_code=404, detail="Experiment parameters not found")
#
#     # Create a new Results entry
#     result_obj = Results(
#         sample_id=result.sample_id,
#         result_id=result.result_id,
#         result=result.result
#     )
#     db.add(result_obj)
#     db.commit()
#     db.refresh(result_obj)
#
#     return result_obj
#
@router.post("/processing-results", response_model=ResultResponse)
def create_result(payload: ResultCreate, db: Session = Depends(get_db)):
    # Check experiment existence
    experiment = (
        db.query(ExperimentParametersModel)
        .filter(ExperimentParametersModel.id == payload.run_id)
        .first()
    )
    if not experiment:
        raise HTTPException(
            status_code=404, detail="Experiment parameters (run) not found"
        )

    result_entry = ResultsModel(
        sample_id=payload.sample_id,
        run_id=payload.run_id,
        result=payload.result.model_dump(),  # Serialize entire result to JSON
    )

    db.add(result_entry)
    db.commit()
    db.refresh(result_entry)

    return ResultResponse(
        id=result_entry.id,
        sample_id=result_entry.sample_id,
        run_id=result_entry.run_id,
        result=payload.result,  # return original payload directly
    )


# @router.get("/results", response_model=list[ResultResponse])
# def get_results(sample_id: int, result_id: int, db: Session = Depends(get_db)):
#     query = db.query(Results)
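For reference, a minimal sketch of how the new /processing-results endpoint could be called from Python. This is illustrative only and not part of the commit: the host/port, the "/samples" router prefix, and the concrete keys inside "result" are assumptions; a real payload has to satisfy the ResultCreate and Results schemas below.

# Illustrative only (not part of this commit). Host, port, the "/samples"
# router prefix and the keys inside "result" are assumptions; adapt them to
# the deployed aaredb backend and to the Results schema.
import requests

payload = {
    "sample_id": 1,        # id of an existing sample
    "run_id": 42,          # id of an existing experiment_parameters (run) row
    "result": {            # serialized Results object; keys shown are illustrative
        "method": "native",
        "pipeline": "autoproc",
        "resolution": 2.1,
        "unit_cell": "78.2 78.2 37.1 90 90 90",
        "spacegroup": "P43212",
        "unique_refl": 12345,
        "comments": "example entry",
    },
}

response = requests.post(
    "http://localhost:8000/samples/processing-results",  # assumed base URL and prefix
    json=payload,
    timeout=10,
)
response.raise_for_status()
print(response.json())  # ResultResponse: id, sample_id, run_id, result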
@@ -352,16 +352,6 @@ class SampleEventCreate(BaseModel):
    event_type: Literal[
        "Mounting", "Centering", "Failed", "Lost", "Collecting", "Unmounting"
    ]
    # event_type: str
    # Validate event_type against accepted event types
    # @field_validator("event_type", mode="before")
    # def validate_event_type(cls, value):
    #     allowed = {"Mounting", "Centering", "Failed",
    #                "Lost", "Collecting", "Unmounting"}
    #     if value not in allowed:
    #         raise ValueError(f"Invalid event_type: {value}.
    #                          Accepted values are: {allowed}")
    #     return value


class SampleEventResponse(SampleEventCreate):
@@ -374,10 +364,7 @@ class SampleEventResponse(SampleEventCreate):


class Results(BaseModel):
    id: int
    pgroup: str
    sample_id: int
    method: str
    pipeline: str
    resolution: float
    unit_cell: str
    spacegroup: str
@@ -393,10 +380,6 @@ class Results(BaseModel):
    unique_refl: int
    comments: Optional[constr(max_length=200)] = None

    # Define attributes for Results here
    class Config:
        from_attributes = True


class ContactCreate(BaseModel):
    pgroups: str
@@ -822,6 +805,21 @@ class ImageInfo(BaseModel):
    id: int
    filepath: str
    comment: Optional[str] = None
    event_type: str
    # run_number: Optional[int]

    class Config:
        from_attributes = True


class characterizationParameters(BaseModel):
    omegaStart_deg: float
    oscillation_deg: float
    omegaStep: float
    chi: float
    phi: float
    numberOfImages: int
    exposureTime_s: float


class RotationParameters(BaseModel):
@@ -882,6 +880,7 @@ class BeamlineParameters(BaseModel):
    beamSizeWidth: Optional[float] = None
    beamSizeHeight: Optional[float] = None
    # dose_MGy: float
    characterization: Optional[characterizationParameters] = None
    rotation: Optional[RotationParameters] = None
    gridScan: Optional[gridScanParamers] = None
    jet: Optional[jetParameters] = None
@@ -922,15 +921,18 @@ class SampleResult(BaseModel):

class ResultCreate(BaseModel):
    sample_id: int
    result_id: int
    result: Optional[dict]
    run_id: int
    result: Results

    class Config:
        from_attributes = True


class ResultResponse(BaseModel):
    id: int
    sample_id: int
    result_id: int
    result: Optional[dict]
    run_id: int
    result: Results

    class Config:
        from_attributes = True
@@ -156,8 +156,8 @@ def on_startup():
        load_slots_data(db)
    else:  # dev or test environments
        print(f"{environment.capitalize()} environment: Regenerating database.")
        Base.metadata.drop_all(bind=engine)
        Base.metadata.create_all(bind=engine)
        # Base.metadata.drop_all(bind=engine)
        # Base.metadata.create_all(bind=engine)
        # from sqlalchemy.engine import reflection
        # from app.models import ExperimentParameters  # adjust the import as needed
        # inspector = reflection.Inspector.from_engine(engine)
backend/tests/test_results.py (new file, 0 lines)
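The new test module is added empty. A hedged sketch of a first test that could live there, assuming the FastAPI app is importable as app.main:app and the router is mounted under "/samples" (both assumptions, not established by this diff):

# Illustrative sketch only; the app import path, the "/samples" prefix and the
# payload contents are assumptions, not part of this commit.
from fastapi.testclient import TestClient

from app.main import app  # assumed application entry point

client = TestClient(app)


def test_create_result_unknown_run_is_rejected():
    # A non-existent run_id should hit the 404 branch of /processing-results;
    # an incomplete "result" body may instead fail schema validation (422).
    payload = {"sample_id": 1, "run_id": 999999, "result": {}}
    response = client.post("/samples/processing-results", json=payload)
    assert response.status_code in (404, 422)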
@@ -171,12 +171,12 @@ async function fetchAndGenerate() {
    const backendDirectory = (() => {
        switch (nodeEnv) {
            case 'prod':
                return path.resolve('/home/jungfrau/heidi-v2/backend/app'); // Production path
                return path.resolve('/home/jungfrau/aaredb/backend/app'); // Production path
            case 'test':
                return path.resolve('/home/jungfrau/heidi-v2/backend/app'); // Test path
                return path.resolve('/home/jungfrau/aaredb/backend/app'); // Test path
            case 'dev':
            default:
                return path.resolve('/Users/gotthardg/PycharmProjects/heidi-v2/backend/app'); // Development path
                return path.resolve('/Users/gotthardg/PycharmProjects/aaredb/backend/app'); // Development path
        }
    })();

@@ -11,6 +11,8 @@ interface ImageInfo {
    id: number;
    filepath: string;
    comment?: string;
    event_type: string;
    run_number?:number;
}

// This represents an experiment run as returned by your API.
@@ -87,7 +89,7 @@ interface TreeRow {
    sample_name?: string;
    puck_name?: string;
    dewar_name?: string;
    images?: ImageInfo[];
    images?: ImageInfo[]; // Images associated explicitly with this row (especially run items)
    run_number?: number;
    beamline_parameters?: ExperimentParameters['beamline_parameters'];
    experimentType?: string;
@@ -176,7 +178,13 @@ const ResultGrid: React.FC<ResultGridProps> = ({ activePgroup }) => {
    }, []);

    useEffect(() => {
        // Fetch sample details and construct rows
        if (!OpenAPI.BASE) {
            console.error('OpenAPI.BASE is not set. Falling back to a default value.');
            return;
        }

        setBasePath(`${OpenAPI.BASE}/`);

        SamplesService.getSampleResultsSamplesResultsGet(activePgroup)
            .then((response: SampleResult[]) => {
                const treeRows: TreeRow[] = [];
@@ -190,28 +198,27 @@ const ResultGrid: React.FC<ResultGridProps> = ({ activePgroup }) => {
                        sample_name: sample.sample_name,
                        puck_name: sample.puck_name,
                        dewar_name: sample.dewar_name,
                        images: sample.images,
                        images: sample.images.filter(img => img.event_type === "Centering"),
                    };
                    treeRows.push(sampleRow);

                    if (sample.experiment_runs) {
                        sample.experiment_runs.forEach((run) => {
                            const experimentType = getExperimentType(run);
                            const numImages = getNumberOfImages(run);
                            const runRow: TreeRow = {
                                id: `run-${sample.sample_id}-${run.run_number}`,
                                hierarchy: [sample.sample_id, run.run_number],
                                type: 'run',
                                sample_id: sample.sample_id,
                                run_number: run.run_number,
                                beamline_parameters: run.beamline_parameters,
                                experimentType,
                                numberOfImages: numImages,
                                images: sample.images,
                            };
                            treeRows.push(runRow);
                        });
                    }
                    sample.experiment_runs?.forEach(run => {
                        const experimentType = getExperimentType(run);
                        const numImages = getNumberOfImages(run);
                        const runRow: TreeRow = {
                            id: `run-${sample.sample_id}-${run.run_number}`,
                            hierarchy: [sample.sample_id, run.run_number],
                            type: 'run',
                            sample_id: sample.sample_id,
                            run_number: run.run_number,
                            beamline_parameters: run.beamline_parameters,
                            experimentType,
                            numberOfImages: numImages,
                            images: sample.images.filter(img =>
                                img.event_type === "Collecting" ),
                        };
                        treeRows.push(runRow);
                    });
                });

                setRows(treeRows);
@@ -221,6 +228,7 @@ const ResultGrid: React.FC<ResultGridProps> = ({ activePgroup }) => {
            });
    }, [activePgroup]);


    // Define the grid columns
    const columns: GridColDef[] = [
        {
testfunctions.ipynb (1070 lines changed): file diff suppressed because it is too large