Refine event types and update related models and logic

Standardized event types with stricter validation using `Literal`. Adjusted related data and logic to align with new types, including changes to PGROUP assignments, event timeline increments, and schema updates. Cleaned up unused code and clarified database initialization behavior.
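For reference, the `Literal`-based validation works like this: Pydantic rejects any `event_type` outside the declared set at model-construction time, so misspelled or legacy values (e.g. the old "Mounted"/"Unmounted" spellings) fail fast. A minimal, self-contained sketch of the pattern (illustrative only; the actual schema change is in the app/schemas.py diff below):

from typing import Literal
from pydantic import BaseModel, ValidationError

class SampleEventCreate(BaseModel):
    event_type: Literal[
        "Mounting", "Centering", "Failed", "Lost", "Collecting", "Unmounting"
    ]

print(SampleEventCreate(event_type="Centering"))  # accepted
try:
    SampleEventCreate(event_type="Mounted")  # legacy spelling, now rejected
except ValidationError as exc:
    print(exc)  # the error message lists the permitted values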
GotthardG 2025-03-14 13:11:05 +01:00
parent f41262575e
commit fbc32474ff
9 changed files with 267 additions and 165 deletions

View File

@@ -191,7 +191,7 @@ def generate_unique_id(length=16):
 dewars = [
     Dewar(
         id=1,
-        pgroups="p20001, p20002",
+        pgroups="p20001",
         dewar_name="Dewar One",
        dewar_type_id=1,
         dewar_serial_number_id=2,
@@ -207,7 +207,7 @@ dewars = [
     ),
     Dewar(
         id=2,
-        pgroups="p20001, p20002",
+        pgroups="p20002",
         dewar_name="Dewar Two",
         dewar_type_id=3,
         dewar_serial_number_id=1,
@@ -255,7 +255,7 @@ dewars = [
     ),
     Dewar(
         id=5,
-        pgroups="p20001, p20002",
+        pgroups="p20003",
         dewar_name="Dewar Five",
         dewar_type_id=1,
         dewar_serial_number_id=1,
@@ -692,10 +692,10 @@ for puck in pucks:
     sample_id_counter += 1

 # Define possible event types for samples
-event_types = ["Mounted", "Failed", "Unmounted", "Lost"]
+event_types = ["Mounting", "Failed", "Unmounting", "Lost"]

-def generate_sample_events(samples, chance_no_event=0.2, chance_lost=0.1):
+def generate_sample_events(samples, chance_no_event=0.02, chance_lost=0.01):
     """Generate events for samples with timestamps
     increasing between different samples."""
@@ -711,11 +711,11 @@ def generate_sample_events(samples, chance_no_event=0.2, chance_lost=0.1):
         # Skip some samples with no events
         if random.random() < chance_no_event:
             # Increment start_time for the next sample to reflect time passage
-            start_time += timedelta(minutes=10)
+            start_time += timedelta(minutes=1)
             continue

         # Determine initial event type
-        event_type = "Failed" if random.random() < 0.05 else "Mounted"
+        event_type = "Failed" if random.random() < 0.005 else "Mounting"

         # Append the initial event
         events.append(
@@ -728,7 +728,7 @@ def generate_sample_events(samples, chance_no_event=0.2, chance_lost=0.1):
         )  # Increment the time for subsequent events

         # Proceed if mounted and it's not the last sample
-        if event_type == "Mounted" and sample is not samples[-1]:
+        if event_type == "Mounting" and sample is not samples[-1]:
             # Determine follow-up event
             if random.random() < chance_lost:
                 events.append(
@@ -740,13 +740,13 @@ def generate_sample_events(samples, chance_no_event=0.2, chance_lost=0.1):
                 events.append(
                     SampleEvent(
                         sample_id=sample.id,
-                        event_type="Unmounted",
+                        event_type="Unmounting",
                         timestamp=current_time,
                     )
                 )

         # Increment start_time for the next sample
-        start_time += timedelta(minutes=10)
+        start_time += timedelta(minutes=1)
     return events
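The hunks above only show the changed fragments of the generator; taken together, the updated logic behaves roughly like this condensed, ORM-free sketch (the SampleEvent model is stood in for by a tuple and the follow-up delay is arbitrary, so it runs standalone):

import random
from datetime import datetime, timedelta

def generate_sample_events(samples, chance_no_event=0.02, chance_lost=0.01):
    """Generate events for samples with timestamps increasing between samples."""
    events = []
    start_time = datetime.now()
    for sample in samples:
        # Skip some samples with no events
        if random.random() < chance_no_event:
            start_time += timedelta(minutes=1)
            continue
        # Determine initial event type
        event_type = "Failed" if random.random() < 0.005 else "Mounting"
        events.append((sample, event_type, start_time))
        # Mounted samples (except the last) get a follow-up event
        if event_type == "Mounting" and sample is not samples[-1]:
            follow_up = "Lost" if random.random() < chance_lost else "Unmounting"
            events.append((sample, follow_up, start_time + timedelta(seconds=50)))
        # Increment start_time for the next sample
        start_time += timedelta(minutes=1)
    return events

print(generate_sample_events(["Sample001", "Sample002", "Sample003"]))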

View File

@@ -169,6 +169,7 @@ class Sample(Base):
     puck_id = Column(Integer, ForeignKey("pucks.id"))
     puck = relationship("Puck", back_populates="samples")
     events = relationship("SampleEvent", back_populates="sample", lazy="joined")
+    images = relationship("Image", back_populates="sample", lazy="joined")

     @property
     def mount_count(self) -> int:
@@ -215,6 +216,7 @@ class SampleEvent(Base):
     event_type = Column(String(255), nullable=False)
     timestamp = Column(DateTime, default=datetime.now)

+    images = relationship("Image", back_populates="sample_event")
     sample = relationship("Sample", back_populates="events")
@@ -258,6 +260,10 @@ class Image(Base):
     filepath = Column(String(255), nullable=False)
     status = Column(String(255), nullable=True)
     sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)
+    sample_event_id = Column(Integer, ForeignKey("sample_events.id"), nullable=False)
+
+    sample = relationship("Sample", back_populates="images")
+    sample_event = relationship("SampleEvent", back_populates="images")

 class ExperimentParameters(Base):
@@ -268,12 +274,16 @@ class ExperimentParameters(Base):
     sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)

-# class Results(Base):
-#     __tablename__ = "results"
-#
-#     id = Column(Integer, primary_key=True, index=True, autoincrement=True)
+class Results(Base):
+    __tablename__ = "results"
+
+    id = Column(Integer, primary_key=True, index=True, autoincrement=True)
     # pgroup = Column(String(255), nullable=False)
-#     sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)
+    result = Column(JSON, nullable=True)
+    result_id = Column(Integer, ForeignKey("experiment_parameters.id"), nullable=False)
+    sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)
     # method = Column(String(255), nullable=False)
     # #resolution: Column(Float(255), nullable=False)
     # unit_cell: str

View File

@@ -14,6 +14,8 @@ from app.schemas import (
     SampleResult,
     ExperimentParametersCreate,
     ExperimentParametersRead,
+    # ResultResponse,
+    # ResultCreate,
 )
 from app.models import (
     Puck as PuckModel,
@@ -22,6 +24,8 @@ from app.models import (
     Image as ImageModel,
     Dewar as DewarModel,
     ExperimentParameters as ExperimentParametersModel,
+    # ExperimentParameters,
+    # Results,
 )
 from app.dependencies import get_db
 import logging
@@ -132,6 +136,32 @@ async def upload_sample_image(
     if not sample:
         raise HTTPException(status_code=404, detail="Sample not found")

+    # Retrieve the most recent sample event for the sample
+    sample_event = (
+        db.query(SampleEventModel)
+        .filter(SampleEventModel.sample_id == sample_id)
+        .order_by(SampleEventModel.timestamp.desc())  # Sort by most recent event
+        .first()
+    )
+
+    if not sample_event:
+        logging.debug(f"No events found for sample with id: {sample_id}")
+        raise HTTPException(
+            status_code=404, detail="No events found for the specified sample"
+        )
+
+    # Log the found sample event for debugging
+    logging.debug(
+        f"Most recent event found for sample_id {sample_id}: "
+        f"event_id={sample_event.id}, "
+        f"type={sample_event.event_type}, "
+        f"timestamp={sample_event.timestamp}"
+    )
+
+    # Extract event type and timestamp for directory structure
+    event_type = sample_event.event_type
+    event_timestamp = sample_event.timestamp.strftime("%Y-%m-%d_%H-%M-%S")
+
     # Define Directory Structure
     pgroup = sample.puck.dewar.pgroups  # adjust to sample or puck pgroups as needed
     today = datetime.now().strftime("%Y-%m-%d")
@@ -142,7 +172,12 @@
     )
     puck_name = sample.puck.puck_name if sample.puck else "default_puck"
     position = sample.position if sample.position else "default_position"
-    base_dir = Path(f"images/{pgroup}/{today}/{dewar_name}/{puck_name}/{position}")
+
+    # Add 'run/event' specific details to the folder structure
+    base_dir = Path(
+        f"images/{pgroup}/{today}/{dewar_name}/{puck_name}/"
+        f"{position}/{event_type}_{event_timestamp}"
+    )
     base_dir.mkdir(parents=True, exist_ok=True)

     # Validate MIME type and Save the File
@@ -150,7 +185,7 @@
         raise HTTPException(
             status_code=400,
             detail=f"Invalid file type: {uploaded_file.filename}."
-            f" Only images are accepted.",
+            f"Only images are accepted.",
         )

     file_path = base_dir / uploaded_file.filename
@@ -164,8 +199,8 @@
         logging.error(f"Error saving file {uploaded_file.filename}: {str(e)}")
         raise HTTPException(
             status_code=500,
-            detail=f"Could not save file {uploaded_file.filename}."
-            f" Ensure the server has correct permissions.",
+            detail=f"Could not save file {uploaded_file.filename}. "
+            f"Ensure the server has correct permissions.",
         )

     # Create the payload from the Pydantic schema
@@ -175,19 +210,20 @@
         filepath=str(file_path),
         status="active",
         sample_id=sample_id,
+        sample_event_id=int(sample_event.id),  # Link to the most recent sample event
     ).dict()

-    # Convert the payload to your mapped SQLAlchemy model instance.
-    # Make sure that ImageModel is your mapped model for images.
+    # Convert the payload to your mapped SQLAlchemy model instance
     new_image = ImageModel(**image_payload)
     db.add(new_image)
     db.commit()
     db.refresh(new_image)

     logging.info(
-        f"Uploaded 1 file for sample {sample_id} and"
-        f" added record {new_image.id} to the database."
+        f"Uploaded 1 file for sample {sample_id} and event {sample_event.id} and "
+        f"added record {new_image.id} to the database."
     )

     # Returning the mapped SQLAlchemy object, which will be converted to the
     # Pydantic response model.
     return new_image
@@ -283,3 +319,42 @@ def create_experiment_parameters_for_sample(
     db.refresh(new_exp)
     return new_exp

+# @router.post("/results", response_model=ResultResponse)
+# def create_result(result: ResultCreate, db: Session = Depends(get_db)):
+#     # Validate sample_id and result_id (optional but recommended)
+#     sample = db.query(SampleModel).filter_by(id=result.sample_id).first()
+#     if not sample:
+#         raise HTTPException(status_code=404, detail="Sample not found")
+#
+#     experiment = db.query(ExperimentParameters).filter_by(id=result.result_id).first()
+#     if not experiment:
+#         raise HTTPException(status_code=404, detail="Experiment parameters not found")
+#
+#     # Create a new Results entry
+#     result_obj = Results(
+#         sample_id=result.sample_id,
+#         result_id=result.result_id,
+#         result=result.result
+#     )
+#     db.add(result_obj)
+#     db.commit()
+#     db.refresh(result_obj)
+#
+#     return result_obj
+#
+# @router.get("/results", response_model=list[ResultResponse])
+# def get_results(sample_id: int, result_id: int, db: Session = Depends(get_db)):
+#     query = db.query(Results)
+#
+#     if sample_id:
+#         query = query.filter(Results.sample_id == sample_id)
+#     if result_id:
+#         query = query.filter(Results.result_id == result_id)
+#
+#     results = query.all()
+#     if not results:
+#         raise HTTPException(status_code=404, detail="No results found")
+#
+#     return results

View File

@@ -1,4 +1,4 @@
-from typing import List, Optional, Union
+from typing import List, Optional, Union, Literal
 from datetime import datetime
 from pydantic import BaseModel, EmailStr, constr, Field, field_validator
 from datetime import date
@@ -349,13 +349,24 @@ class DataCollectionParameters(BaseModel):
 class SampleEventCreate(BaseModel):
-    event_type: str
+    event_type: Literal[
+        "Mounting", "Centering", "Failed", "Lost", "Collecting", "Unmounting"
+    ]
+    # event_type: str
+
+    # Validate event_type against accepted event types
+    # @field_validator("event_type", mode="before")
+    # def validate_event_type(cls, value):
+    #     allowed = {"Mounting", "Centering", "Failed",
+    #                "Lost", "Collecting", "Unmounting"}
+    #     if value not in allowed:
+    #         raise ValueError(f"Invalid event_type: {value}.
+    #                          Accepted values are: {allowed}")
+    #     return value

-class SampleEventResponse(BaseModel):
+class SampleEventResponse(SampleEventCreate):
     id: int
     sample_id: int
-    event_type: str
     timestamp: datetime

     class Config:
@@ -791,6 +802,7 @@ class Beamtime(BaseModel):
 class ImageCreate(BaseModel):
     pgroup: str
     sample_id: int
+    sample_event_id: int
     filepath: str
     status: str = "active"
     comment: Optional[str] = None
@@ -906,3 +918,19 @@ class SampleResult(BaseModel):
     dewar_name: Optional[str]
     images: List[ImageInfo]
     experiment_runs: Optional[List[ExperimentParametersRead]] = []
+
+
+class ResultCreate(BaseModel):
+    sample_id: int
+    result_id: int
+    result: Optional[dict]
+
+
+class ResultResponse(BaseModel):
+    id: int
+    sample_id: int
+    result_id: int
+    result: Optional[dict]
+
+    class Config:
+        from_attributes = True
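The new `ResultCreate`/`ResultResponse` schemas pair with the still-commented-out `/results` endpoints in the router diff above; nothing consumes them yet in this commit. A hypothetical payload, assuming those endpoints are eventually enabled (the `unit_cell` key only mirrors the placeholder comment in models.py and is not a defined field):

from typing import Optional
from pydantic import BaseModel

class ResultCreate(BaseModel):
    sample_id: int
    result_id: int
    result: Optional[dict]

# result_id refers to an ExperimentParameters row, per the new Results model
payload = ResultCreate(sample_id=299, result_id=1, result={"unit_cell": "tbd"})
print(payload)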

View File

@@ -134,17 +134,17 @@ def on_startup():
     if environment == "prod":
         from sqlalchemy.engine import reflection

-        # inspector = reflection.Inspector.from_engine(engine)
-        # tables_exist = inspector.get_table_names()
-        from app.models import ExperimentParameters  # adjust the import as needed
-
         inspector = reflection.Inspector.from_engine(engine)
         tables_exist = inspector.get_table_names()

-        if ExperimentParameters.__tablename__ not in tables_exist:
-            print("Creating missing table: ExperimentParameters")
-            ExperimentParameters.__table__.create(bind=engine)
+        # from app.models import ExperimentParameters  # adjust the import as needed
+        #
+        # inspector = reflection.Inspector.from_engine(engine)
+        # tables_exist = inspector.get_table_names()
+        #
+        # if ExperimentParameters.__tablename__ not in tables_exist:
+        #     print("Creating missing table: ExperimentParameters")
+        #     ExperimentParameters.__table__.create(bind=engine)
+        #
         # Ensure the production database is initialized
         if not tables_exist:
             print("Production database is empty. Initializing...")
@@ -156,8 +156,8 @@ def on_startup():
             load_slots_data(db)
     else:  # dev or test environments
         print(f"{environment.capitalize()} environment: Regenerating database.")
-        # Base.metadata.drop_all(bind=engine)
-        # Base.metadata.create_all(bind=engine)
+        Base.metadata.drop_all(bind=engine)
+        Base.metadata.create_all(bind=engine)
         # from sqlalchemy.engine import reflection
         # from app.models import ExperimentParameters  # adjust the import as needed
         # inspector = reflection.Inspector.from_engine(engine)

View File

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "aareDB"
-version = "0.1.0a24"
+version = "0.1.0a25"
 description = "Backend for next gen sample management system"
 authors = [{name = "Guillaume Gotthard", email = "guillaume.gotthard@psi.ch"}]
 license = {text = "MIT"}

View File

@@ -59,8 +59,8 @@ const SampleTracker: React.FC<SampleTrackerProps> = ({ activePgroup }) => {

   const getSampleColor = (events: Event[] = []) => {
-    const hasMounted = events.some((e) => e.event_type === 'Mounted');
-    const hasUnmounted = events.some((e) => e.event_type === 'Unmounted');
+    const hasMounted = events.some((e) => e.event_type === 'Mounting');
+    const hasUnmounted = events.some((e) => e.event_type === 'Unmounting');
     const hasLost = events.some((e) => e.event_type === 'Lost');
     const hasFailed = events.some((e) => e.event_type === 'Failed');

View File

@@ -1,7 +1,7 @@
 #!/bin/bash

 # Extract values from pyproject.toml
-PYPROJECT_FILE="$(dirname "$0")/pyproject.toml"
+PYPROJECT_FILE="$(dirname "$0")/backend/pyproject.toml"
 NAME=$(awk -F'= ' '/^name/ { print $2 }' "$PYPROJECT_FILE" | tr -d '"')
 VERSION=$(awk -F'= ' '/^version/ { print $2 }' "$PYPROJECT_FILE" | tr -d '"')

View File

@@ -3,8 +3,8 @@
 {
 "metadata": {
 "ExecuteTime": {
-"end_time": "2025-03-04T09:22:38.371102Z",
-"start_time": "2025-03-04T09:22:37.939673Z"
+"end_time": "2025-03-11T15:04:51.407218Z",
+"start_time": "2025-03-11T15:04:51.403058Z"
 }
 },
 "cell_type": "code",
@@ -41,12 +41,12 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"0.1.0a24\n",
+"0.1.0a25\n",
 "https://127.0.0.1:8000\n"
 ]
 }
 ],
-"execution_count": 1
+"execution_count": 48
 },
 {
 "metadata": {},
@@ -328,8 +328,8 @@
 {
 "metadata": {
 "ExecuteTime": {
-"end_time": "2025-03-03T10:06:39.979358Z",
-"start_time": "2025-03-03T10:06:39.957359Z"
+"end_time": "2025-03-07T12:40:09.769132Z",
+"start_time": "2025-03-07T12:40:09.752103Z"
 }
 },
 "cell_type": "code",
@@ -369,48 +369,6 @@
 ],
 "id": "51578d944878db6a",
 "outputs": [
-{
-"name": "stdout",
-"output_type": "stream",
-"text": [
-"Puck ID: 6, Puck Name: PUCK006\n",
-"  Sample ID: 44, Sample Name: Sample044, Position: 2, Mount count: 1\n",
-"  Sample ID: 45, Sample Name: Sample045, Position: 3, Mount count: 0\n",
-"  Sample ID: 46, Sample Name: Sample046, Position: 4, Mount count: 0\n",
-"  Sample ID: 47, Sample Name: Sample047, Position: 5, Mount count: 1\n",
-"Puck ID: 3, Puck Name: PUCK003\n",
-"  Sample ID: 24, Sample Name: Sample024, Position: 1, Mount count: 0\n",
-"  Sample ID: 25, Sample Name: Sample025, Position: 5, Mount count: 1\n",
-"  Sample ID: 26, Sample Name: Sample026, Position: 8, Mount count: 1\n",
-"  Sample ID: 27, Sample Name: Sample027, Position: 11, Mount count: 1\n",
-"  Sample ID: 28, Sample Name: Sample028, Position: 12, Mount count: 1\n",
-"Puck ID: 2, Puck Name: PUCK002\n",
-"  Sample ID: 17, Sample Name: Sample017, Position: 4, Mount count: 1\n",
-"  Sample ID: 18, Sample Name: Sample018, Position: 5, Mount count: 0\n",
-"  Sample ID: 19, Sample Name: Sample019, Position: 7, Mount count: 1\n",
-"  Sample ID: 20, Sample Name: Sample020, Position: 10, Mount count: 0\n",
-"  Sample ID: 21, Sample Name: Sample021, Position: 11, Mount count: 1\n",
-"  Sample ID: 22, Sample Name: Sample022, Position: 13, Mount count: 0\n",
-"  Sample ID: 23, Sample Name: Sample023, Position: 16, Mount count: 1\n",
-"Puck ID: 1, Puck Name: PUCK-001\n",
-"  Sample ID: 1, Sample Name: Sample001, Position: 1, Mount count: 1\n",
-"  Sample ID: 2, Sample Name: Sample002, Position: 2, Mount count: 1\n",
-"  Sample ID: 3, Sample Name: Sample003, Position: 3, Mount count: 0\n",
-"  Sample ID: 4, Sample Name: Sample004, Position: 4, Mount count: 0\n",
-"  Sample ID: 5, Sample Name: Sample005, Position: 5, Mount count: 0\n",
-"  Sample ID: 6, Sample Name: Sample006, Position: 6, Mount count: 1\n",
-"  Sample ID: 7, Sample Name: Sample007, Position: 7, Mount count: 0\n",
-"  Sample ID: 8, Sample Name: Sample008, Position: 8, Mount count: 1\n",
-"  Sample ID: 9, Sample Name: Sample009, Position: 9, Mount count: 1\n",
-"  Sample ID: 10, Sample Name: Sample010, Position: 10, Mount count: 1\n",
-"  Sample ID: 11, Sample Name: Sample011, Position: 11, Mount count: 1\n",
-"  Sample ID: 12, Sample Name: Sample012, Position: 12, Mount count: 1\n",
-"  Sample ID: 13, Sample Name: Sample013, Position: 13, Mount count: 0\n",
-"  Sample ID: 14, Sample Name: Sample014, Position: 14, Mount count: 1\n",
-"  Sample ID: 15, Sample Name: Sample015, Position: 15, Mount count: 0\n",
-"  Sample ID: 16, Sample Name: Sample016, Position: 16, Mount count: 1\n"
-]
-},
 {
 "name": "stderr",
 "output_type": "stream",
@@ -420,13 +378,13 @@
 ]
 }
 ],
-"execution_count": 3
+"execution_count": 2
 },
 {
 "metadata": {
 "ExecuteTime": {
-"end_time": "2025-02-26T12:05:03.257159Z",
-"start_time": "2025-02-26T12:05:03.232846Z"
+"end_time": "2025-03-07T20:50:06.182786Z",
+"start_time": "2025-03-07T20:50:06.165153Z"
 }
 },
 "cell_type": "code",
@@ -438,12 +396,12 @@
 "with aareDBclient.ApiClient(configuration) as api_client:\n",
 "    # Instance of the API client\n",
 "    api_instance = aareDBclient.SamplesApi(api_client)\n",
-"\n",
+"    sample_id=258\n",
 "    try:\n",
 "        # Define the payload with only `event_type`\n",
 "        sample_event_create = SampleEventCreate(\n",
-"            sample_id=16,\n",
-"            event_type=\"Mounted\" # Valid event type\n",
+"            sample_id=sample_id,\n",
+"            event_type=\"Centering\" # Valid event type\n",
 "        )\n",
 "\n",
 "        # Debug the payload before sending\n",
@@ -452,7 +410,7 @@
 "\n",
 "        # Call the API\n",
 "        api_response = api_instance.create_sample_event_samples_samples_sample_id_events_post(\n",
-"            sample_id=16, # Ensure this matches a valid sample ID in the database\n",
+"            sample_id=sample_id, # Ensure this matches a valid sample ID in the database\n",
 "            sample_event_create=sample_event_create\n",
 "        )\n",
 "\n",
@@ -475,20 +433,20 @@
 "output_type": "stream",
 "text": [
 "Payload being sent to API:\n",
-"{\"event_type\":\"Mounted\"}\n",
+"{\"event_type\":\"Centering\"}\n",
 "API response:\n",
-"('id', 16)\n",
-"('sample_name', 'Sample016')\n",
-"('position', 16)\n",
-"('puck_id', 1)\n",
+"('id', 258)\n",
+"('sample_name', 'Sample258')\n",
+"('position', 14)\n",
+"('puck_id', 26)\n",
 "('crystalname', None)\n",
 "('proteinname', None)\n",
 "('positioninpuck', None)\n",
 "('priority', None)\n",
 "('comments', None)\n",
 "('data_collection_parameters', None)\n",
-"('events', [SampleEventResponse(id=399, sample_id=16, event_type='Mounted', timestamp=datetime.datetime(2025, 2, 26, 13, 5, 3))])\n",
-"('mount_count', 1)\n",
+"('events', [SampleEventResponse(id=492, sample_id=258, event_type='Mounting', timestamp=datetime.datetime(2025, 3, 6, 13, 50)), SampleEventResponse(id=493, sample_id=258, event_type='Unmounting', timestamp=datetime.datetime(2025, 3, 6, 13, 50, 50)), SampleEventResponse(id=573, sample_id=258, event_type='Centering', timestamp=datetime.datetime(2025, 3, 7, 21, 50, 6))])\n",
+"('mount_count', 0)\n",
 "('unmount_count', 0)\n"
 ]
 },
@@ -501,7 +459,7 @@
 ]
 }
 ],
-"execution_count": 80
+"execution_count": 46
 },
 {
 "metadata": {},
@@ -528,52 +486,64 @@
 {
 "metadata": {
 "ExecuteTime": {
-"end_time": "2025-02-26T16:15:33.052345Z",
-"start_time": "2025-02-26T16:15:33.022632Z"
+"end_time": "2025-03-07T20:51:14.203996Z",
+"start_time": "2025-03-07T20:51:14.134135Z"
 }
 },
 "cell_type": "code",
 "source": [
-"# post images to sample database\n",
+"# Post multiple images to the sample database\n",
 "\n",
 "import os\n",
 "import mimetypes\n",
 "import requests\n",
 "\n",
-"# File path to the image\n",
-"file_path = \"backend/tests/sample_image/IMG_1942.jpg\"\n",
-"filename = os.path.basename(file_path)\n",
-"mime_type, _ = mimetypes.guess_type(file_path)\n",
-"if mime_type is None:\n",
-"    mime_type = \"application/octet-stream\"\n",
+"# List of file paths to the images you want to upload\n",
+"file_paths = [\n",
+"    \"backend/tests/sample_image/0_200.jpg\",\n",
+"    \"backend/tests/sample_image/90_200.jpg\",\n",
+"    \"backend/tests/sample_image/0_700.jpg\",\n",
+"    \"backend/tests/sample_image/90_700.jpg\",\n",
+"]\n",
+"\n",
+"#file_paths = [\"backend/tests/sample_image/mount.jpeg.jpg\"]\n",
+"\n",
 "\n",
 "# Sample ID (ensure this exists on your backend)\n",
-"sample_id = 16\n",
+"sample_id = 299\n",
 "\n",
-"# Build the URL for the upload endpoint.\n",
+"# Base URL for the upload endpoint\n",
 "url = f\"https://127.0.0.1:8000/samples/{sample_id}/upload-images\"\n",
 "\n",
-"# Open the file and construct the files dictionary\n",
-"with open(file_path, \"rb\") as file_data:\n",
-"    files = {\n",
-"        # Use key \"uploaded_file\" as required by your API\n",
-"        \"uploaded_file\": (filename, file_data, mime_type)\n",
-"    }\n",
-"    headers = {\n",
-"        \"accept\": \"application/json\"\n",
-"    }\n",
-"    comment = \"before loop centering\"\n",
+"# Iterate through each file and upload it\n",
+"for file_path in file_paths:\n",
+"    # Determine file name and MIME type\n",
+"    filename = os.path.basename(file_path)\n",
+"    mime_type, _ = mimetypes.guess_type(file_path)\n",
+"    if mime_type is None:\n",
+"        mime_type = \"application/octet-stream\"\n",
 "\n",
-"    # Set verify=False to bypass certificate verification (only use in development)\n",
-"    response = requests.post(url, headers=headers, files=files, verify=False)\n",
+"    # Open the file for uploading\n",
+"    with open(file_path, \"rb\") as file_data:\n",
+"        files = {\n",
+"            # Use key \"uploaded_file\" as required by your API\n",
+"            \"uploaded_file\": (filename, file_data, mime_type)\n",
+"        }\n",
+"        headers = {\n",
+"            \"accept\": \"application/json\"\n",
+"        }\n",
 "\n",
-"# Check the API response\n",
-"print(\"API Response:\")\n",
-"print(response.status_code)\n",
-"try:\n",
-"    print(response.json())\n",
-"except Exception:\n",
-"    print(response.text)\n"
+"        # Send the POST request\n",
+"        print(f\"Uploading {filename}...\")\n",
+"        response = requests.post(url, headers=headers, files=files, verify=False)\n",
+"\n",
+"        # Check the API response\n",
+"        print(f\"API Response for {filename}:\")\n",
+"        print(response.status_code)\n",
+"        try:\n",
+"            print(response.json())\n",
+"        except Exception:\n",
+"            print(response.text)\n"
 ],
 "id": "11f62976d2e7d9b1",
 "outputs": [
@@ -581,21 +551,40 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"API Response:\n",
+"Uploading 0_200.jpg...\n",
+"API Response for 0_200.jpg:\n",
 "200\n",
-"{'pgroup': 'p20001', 'sample_id': 16, 'filepath': 'images/p20001/2025-02-26/Dewar One/PUCK-001/16/IMG_1942.jpg', 'status': 'active', 'comment': None, 'id': 4}\n"
+"{'pgroup': 'p20003', 'sample_id': 299, 'sample_event_id': 572, 'filepath': 'images/p20003/2025-03-07/Dewar Five/PKK007/16/Mounting_2025-03-06_14-31-00/0_200.jpg', 'status': 'active', 'comment': None, 'id': 2}\n",
+"Uploading 90_200.jpg...\n",
+"API Response for 90_200.jpg:\n",
+"200\n",
+"{'pgroup': 'p20003', 'sample_id': 299, 'sample_event_id': 572, 'filepath': 'images/p20003/2025-03-07/Dewar Five/PKK007/16/Mounting_2025-03-06_14-31-00/90_200.jpg', 'status': 'active', 'comment': None, 'id': 3}\n",
+"Uploading 0_700.jpg...\n",
+"API Response for 0_700.jpg:\n",
+"200\n",
+"{'pgroup': 'p20003', 'sample_id': 299, 'sample_event_id': 572, 'filepath': 'images/p20003/2025-03-07/Dewar Five/PKK007/16/Mounting_2025-03-06_14-31-00/0_700.jpg', 'status': 'active', 'comment': None, 'id': 4}\n",
+"Uploading 90_700.jpg...\n",
+"API Response for 90_700.jpg:\n",
+"200\n",
+"{'pgroup': 'p20003', 'sample_id': 299, 'sample_event_id': 572, 'filepath': 'images/p20003/2025-03-07/Dewar Five/PKK007/16/Mounting_2025-03-06_14-31-00/90_700.jpg', 'status': 'active', 'comment': None, 'id': 5}\n"
 ]
 },
 {
 "name": "stderr",
 "output_type": "stream",
 "text": [
+"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1097: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
+"  warnings.warn(\n",
+"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1097: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
+"  warnings.warn(\n",
+"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1097: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
+"  warnings.warn(\n",
 "/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1097: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
 "  warnings.warn(\n"
 ]
 }
 ],
-"execution_count": 89
+"execution_count": 47
 },
 {
 "metadata": {},
@@ -608,8 +597,8 @@
 {
 "metadata": {
 "ExecuteTime": {
-"end_time": "2025-03-04T09:17:34.613556Z",
-"start_time": "2025-03-04T09:17:34.599367Z"
+"end_time": "2025-03-11T15:05:58.348843Z",
+"start_time": "2025-03-11T15:05:58.331240Z"
 }
 },
 "cell_type": "code",
@@ -624,27 +613,27 @@
 ")\n",
 "\n",
 "\n",
-"## Build the nested parameters\n",
-"#rotation = RotationParameters(\n",
-"#    omegaStart_deg=0.0,\n",
-"#    omegaStep=90.0,\n",
-"#    phi=10.0,\n",
-"#    chi=0.0,\n",
-"#    numberOfImages=4,\n",
-"#    exposureTime_s=0.02\n",
-"#)\n",
-"\n",
-"gridscan = GridScanParamers(\n",
-"    xStart=0.0,\n",
-"    xStep=0.1,\n",
-"    yStart=0.0,\n",
-"    yStep= 0.1,\n",
-"    zStart=0.0,\n",
-"    zStep=0.0,\n",
-"    numberOfImages=4600,\n",
-"    exposureTime_s=0.001\n",
+"# Build the nested parameters\n",
+"rotation = RotationParameters(\n",
+"    omegaStart_deg=0.0,\n",
+"    omegaStep=90.0,\n",
+"    phi=10.0,\n",
+"    chi=0.0,\n",
+"    numberOfImages=4,\n",
+"    exposureTime_s=0.02\n",
 ")\n",
 "\n",
+"#gridscan = GridScanParamers(\n",
+"#    xStart=0.0,\n",
+"#    xStep=0.1,\n",
+"#    yStart=0.0,\n",
+"#    yStep= 0.1,\n",
+"#    zStart=0.0,\n",
+"#    zStep=0.0,\n",
+"#    numberOfImages=4600,\n",
+"#    exposureTime_s=0.001\n",
+"#)\n",
+"\n",
 "# If your client code requires you to build a detector model,\n",
 "# you can either use a Detector model or pass a dictionary.\n",
 "# Here we pass a dictionary.\n",
@@ -673,8 +662,8 @@
 "    beamlineFluxAtSample_ph_s=0,\n",
 "    beamSizeWidth=30.0,\n",
 "    beamSizeHeight=30.0,\n",
-"#    rotation=rotation # Optional nested parameter\n",
-"    gridScan=gridscan\n",
+"    rotation=rotation # Optional nested parameter\n",
+"    #gridScan=gridscan\n",
 "    # gridScan and jet are optional and can be added similarly\n",
 ")\n",
 "\n",
@@ -684,7 +673,7 @@
 "experiment_params_payload = ExperimentParametersCreate(\n",
 "    # run_number can be omitted/ignored if computed on the server\n",
 "    beamline_parameters=beamline_params,\n",
-"    sample_id=15 # change sample_id to an existing sample in your database\n",
+"    sample_id=299 # change sample_id to an existing sample in your database\n",
 ")\n",
 "\n",
 "# Now, use the API instance to send the POST request\n",
@@ -711,7 +700,7 @@
 "output_type": "stream",
 "text": [
 "API Response:\n",
-"run_number=3 beamline_parameters=BeamlineParameters(synchrotron='Swiss Light Source', beamline='PXIII', detector=Detector(manufacturer='DECTRIS', model='PILATUS4 2M', type='photon-counting', serial_number='16684dscsd668468', detector_distance_mm=95.0, beam_center_x_px=512.0, beam_center_y_px=512.0, pixel_size_x_um=150.0, pixel_size_y_um=150.0), wavelength=1.0, ring_current_a=0.0, ring_mode='Machine Down', undulator=None, undulatorgap_mm=None, monochromator='Si111', transmission=1.0, focusing_optic='Kirkpatrick-Baez', beamline_flux_at_sample_ph_s=0.0, beam_size_width=30.0, beam_size_height=30.0, rotation=None, grid_scan=GridScanParamers(x_start=0.0, x_step=0.1, y_start=0.0, y_step=0.1, z_start=0.0, z_step=0.0, number_of_images=4600, exposure_time_s=0.001), jet=None, cryojet_temperature_k=None, humidifier_temperature_k=None, humidifier_humidity=None) sample_id=15 id=10\n"
+"run_number=1 beamline_parameters=BeamlineParameters(synchrotron='Swiss Light Source', beamline='PXIII', detector=Detector(manufacturer='DECTRIS', model='PILATUS4 2M', type='photon-counting', serial_number='16684dscsd668468', detector_distance_mm=95.0, beam_center_x_px=512.0, beam_center_y_px=512.0, pixel_size_x_um=150.0, pixel_size_y_um=150.0), wavelength=1.0, ring_current_a=0.0, ring_mode='Machine Down', undulator=None, undulatorgap_mm=None, monochromator='Si111', transmission=1.0, focusing_optic='Kirkpatrick-Baez', beamline_flux_at_sample_ph_s=0.0, beam_size_width=30.0, beam_size_height=30.0, rotation=RotationParameters(omega_start_deg=0.0, omega_step=90.0, chi=0.0, phi=10.0, number_of_images=4, exposure_time_s=0.02), grid_scan=None, jet=None, cryojet_temperature_k=None, humidifier_temperature_k=None, humidifier_humidity=None) sample_id=299 id=1\n"
 ]
 },
 {
@@ -723,7 +712,7 @@
 ]
 }
 ],
-"execution_count": 12
+"execution_count": 49
 },
 {
 "metadata": {