Refine event types and update related models and logic

Standardized event types with stricter validation using `Literal`. Adjusted related data and logic to align with new types, including changes to PGROUP assignments, event timeline increments, and schema updates. Cleaned up unused code and clarified database initialization behavior.
This commit is contained in:
GotthardG 2025-03-14 13:11:05 +01:00
parent f41262575e
commit fbc32474ff
9 changed files with 267 additions and 165 deletions

View File

@ -191,7 +191,7 @@ def generate_unique_id(length=16):
dewars = [
Dewar(
id=1,
pgroups="p20001, p20002",
pgroups="p20001",
dewar_name="Dewar One",
dewar_type_id=1,
dewar_serial_number_id=2,
@ -207,7 +207,7 @@ dewars = [
),
Dewar(
id=2,
pgroups="p20001, p20002",
pgroups="p20002",
dewar_name="Dewar Two",
dewar_type_id=3,
dewar_serial_number_id=1,
@ -255,7 +255,7 @@ dewars = [
),
Dewar(
id=5,
pgroups="p20001, p20002",
pgroups="p20003",
dewar_name="Dewar Five",
dewar_type_id=1,
dewar_serial_number_id=1,
@ -692,10 +692,10 @@ for puck in pucks:
sample_id_counter += 1
# Define possible event types for samples
event_types = ["Mounted", "Failed", "Unmounted", "Lost"]
event_types = ["Mounting", "Failed", "Unmounting", "Lost"]
def generate_sample_events(samples, chance_no_event=0.2, chance_lost=0.1):
def generate_sample_events(samples, chance_no_event=0.02, chance_lost=0.01):
"""Generate events for samples with timestamps
increasing between different samples."""
@ -711,11 +711,11 @@ def generate_sample_events(samples, chance_no_event=0.2, chance_lost=0.1):
# Skip some samples with no events
if random.random() < chance_no_event:
# Increment start_time for the next sample to reflect time passage
start_time += timedelta(minutes=10)
start_time += timedelta(minutes=1)
continue
# Determine initial event type
event_type = "Failed" if random.random() < 0.05 else "Mounted"
event_type = "Failed" if random.random() < 0.005 else "Mounting"
# Append the initial event
events.append(
@ -728,7 +728,7 @@ def generate_sample_events(samples, chance_no_event=0.2, chance_lost=0.1):
) # Increment the time for subsequent events
# Proceed if mounted and it's not the last sample
if event_type == "Mounted" and sample is not samples[-1]:
if event_type == "Mounting" and sample is not samples[-1]:
# Determine follow-up event
if random.random() < chance_lost:
events.append(
@ -740,13 +740,13 @@ def generate_sample_events(samples, chance_no_event=0.2, chance_lost=0.1):
events.append(
SampleEvent(
sample_id=sample.id,
event_type="Unmounted",
event_type="Unmounting",
timestamp=current_time,
)
)
# Increment start_time for the next sample
start_time += timedelta(minutes=10)
start_time += timedelta(minutes=1)
return events

View File

@ -169,6 +169,7 @@ class Sample(Base):
puck_id = Column(Integer, ForeignKey("pucks.id"))
puck = relationship("Puck", back_populates="samples")
events = relationship("SampleEvent", back_populates="sample", lazy="joined")
images = relationship("Image", back_populates="sample", lazy="joined")
@property
def mount_count(self) -> int:
@ -215,6 +216,7 @@ class SampleEvent(Base):
event_type = Column(String(255), nullable=False)
timestamp = Column(DateTime, default=datetime.now)
images = relationship("Image", back_populates="sample_event")
sample = relationship("Sample", back_populates="events")
@ -258,6 +260,10 @@ class Image(Base):
filepath = Column(String(255), nullable=False)
status = Column(String(255), nullable=True)
sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)
sample_event_id = Column(Integer, ForeignKey("sample_events.id"), nullable=False)
sample = relationship("Sample", back_populates="images")
sample_event = relationship("SampleEvent", back_populates="images")
class ExperimentParameters(Base):
@ -268,12 +274,16 @@ class ExperimentParameters(Base):
sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)
# class Results(Base):
# __tablename__ = "results"
#
# id = Column(Integer, primary_key=True, index=True, autoincrement=True)
# pgroup = Column(String(255), nullable=False)
# sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)
class Results(Base):
__tablename__ = "results"
id = Column(Integer, primary_key=True, index=True, autoincrement=True)
# pgroup = Column(String(255), nullable=False)
result = Column(JSON, nullable=True)
result_id = Column(Integer, ForeignKey("experiment_parameters.id"), nullable=False)
sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)
# method = Column(String(255), nullable=False)
# #resolution: Column(Float(255), nullable=False)
# unit_cell: str

View File

@ -14,6 +14,8 @@ from app.schemas import (
SampleResult,
ExperimentParametersCreate,
ExperimentParametersRead,
# ResultResponse,
# ResultCreate,
)
from app.models import (
Puck as PuckModel,
@ -22,6 +24,8 @@ from app.models import (
Image as ImageModel,
Dewar as DewarModel,
ExperimentParameters as ExperimentParametersModel,
# ExperimentParameters,
# Results,
)
from app.dependencies import get_db
import logging
@ -132,6 +136,32 @@ async def upload_sample_image(
if not sample:
raise HTTPException(status_code=404, detail="Sample not found")
# Retrieve the most recent sample event for the sample
sample_event = (
db.query(SampleEventModel)
.filter(SampleEventModel.sample_id == sample_id)
.order_by(SampleEventModel.timestamp.desc()) # Sort by most recent event
.first()
)
if not sample_event:
logging.debug(f"No events found for sample with id: {sample_id}")
raise HTTPException(
status_code=404, detail="No events found for the specified sample"
)
# Log the found sample event for debugging
logging.debug(
f"Most recent event found for sample_id {sample_id}: "
f"event_id={sample_event.id}, "
f"type={sample_event.event_type}, "
f"timestamp={sample_event.timestamp}"
)
# Extract event type and timestamp for directory structure
event_type = sample_event.event_type
event_timestamp = sample_event.timestamp.strftime("%Y-%m-%d_%H-%M-%S")
# Define Directory Structure
pgroup = sample.puck.dewar.pgroups # adjust to sample or puck pgroups as needed
today = datetime.now().strftime("%Y-%m-%d")
@ -142,7 +172,12 @@ async def upload_sample_image(
)
puck_name = sample.puck.puck_name if sample.puck else "default_puck"
position = sample.position if sample.position else "default_position"
base_dir = Path(f"images/{pgroup}/{today}/{dewar_name}/{puck_name}/{position}")
# Add 'run/event' specific details to the folder structure
base_dir = Path(
f"images/{pgroup}/{today}/{dewar_name}/{puck_name}/"
f"{position}/{event_type}_{event_timestamp}"
)
base_dir.mkdir(parents=True, exist_ok=True)
# Validate MIME type and Save the File
@ -150,7 +185,7 @@ async def upload_sample_image(
raise HTTPException(
status_code=400,
detail=f"Invalid file type: {uploaded_file.filename}."
f" Only images are accepted.",
f"Only images are accepted.",
)
file_path = base_dir / uploaded_file.filename
@ -164,8 +199,8 @@ async def upload_sample_image(
logging.error(f"Error saving file {uploaded_file.filename}: {str(e)}")
raise HTTPException(
status_code=500,
detail=f"Could not save file {uploaded_file.filename}."
f" Ensure the server has correct permissions.",
detail=f"Could not save file {uploaded_file.filename}. "
f"Ensure the server has correct permissions.",
)
# Create the payload from the Pydantic schema
@ -175,19 +210,20 @@ async def upload_sample_image(
filepath=str(file_path),
status="active",
sample_id=sample_id,
sample_event_id=int(sample_event.id), # Link to the most recent sample event
).dict()
# Convert the payload to your mapped SQLAlchemy model instance.
# Make sure that ImageModel is your mapped model for images.
# Convert the payload to your mapped SQLAlchemy model instance
new_image = ImageModel(**image_payload)
db.add(new_image)
db.commit()
db.refresh(new_image)
logging.info(
f"Uploaded 1 file for sample {sample_id} and"
f" added record {new_image.id} to the database."
f"Uploaded 1 file for sample {sample_id} and event {sample_event.id} and "
f"added record {new_image.id} to the database."
)
# Returning the mapped SQLAlchemy object, which will be converted to the
# Pydantic response model.
return new_image
@ -283,3 +319,42 @@ def create_experiment_parameters_for_sample(
db.refresh(new_exp)
return new_exp
# @router.post("/results", response_model=ResultResponse)
# def create_result(result: ResultCreate, db: Session = Depends(get_db)):
# # Validate sample_id and result_id (optional but recommended)
# sample = db.query(SampleModel).filter_by(id=result.sample_id).first()
# if not sample:
# raise HTTPException(status_code=404, detail="Sample not found")
#
# experiment = db.query(ExperimentParameters).filter_by(id=result.result_id).first()
# if not experiment:
# raise HTTPException(status_code=404, detail="Experiment parameters not found")
#
# # Create a new Results entry
# result_obj = Results(
# sample_id=result.sample_id,
# result_id=result.result_id,
# result=result.result
# )
# db.add(result_obj)
# db.commit()
# db.refresh(result_obj)
#
# return result_obj
#
# @router.get("/results", response_model=list[ResultResponse])
# def get_results(sample_id: int, result_id: int, db: Session = Depends(get_db)):
# query = db.query(Results)
#
# if sample_id:
# query = query.filter(Results.sample_id == sample_id)
# if result_id:
# query = query.filter(Results.result_id == result_id)
#
# results = query.all()
# if not results:
# raise HTTPException(status_code=404, detail="No results found")
#
# return results

View File

@ -1,4 +1,4 @@
from typing import List, Optional, Union
from typing import List, Optional, Union, Literal
from datetime import datetime
from pydantic import BaseModel, EmailStr, constr, Field, field_validator
from datetime import date
@ -349,13 +349,24 @@ class DataCollectionParameters(BaseModel):
class SampleEventCreate(BaseModel):
event_type: str
event_type: Literal[
"Mounting", "Centering", "Failed", "Lost", "Collecting", "Unmounting"
]
# event_type: str
# Validate event_type against accepted event types
# @field_validator("event_type", mode="before")
# def validate_event_type(cls, value):
# allowed = {"Mounting", "Centering", "Failed",
# "Lost", "Collecting", "Unmounting"}
# if value not in allowed:
# raise ValueError(f"Invalid event_type: {value}.
# Accepted values are: {allowed}")
# return value
class SampleEventResponse(BaseModel):
class SampleEventResponse(SampleEventCreate):
id: int
sample_id: int
event_type: str
timestamp: datetime
class Config:
@ -791,6 +802,7 @@ class Beamtime(BaseModel):
class ImageCreate(BaseModel):
pgroup: str
sample_id: int
sample_event_id: int
filepath: str
status: str = "active"
comment: Optional[str] = None
@ -906,3 +918,19 @@ class SampleResult(BaseModel):
dewar_name: Optional[str]
images: List[ImageInfo]
experiment_runs: Optional[List[ExperimentParametersRead]] = []
class ResultCreate(BaseModel):
sample_id: int
result_id: int
result: Optional[dict]
class ResultResponse(BaseModel):
id: int
sample_id: int
result_id: int
result: Optional[dict]
class Config:
from_attributes = True

View File

@ -134,17 +134,17 @@ def on_startup():
if environment == "prod":
from sqlalchemy.engine import reflection
# inspector = reflection.Inspector.from_engine(engine)
# tables_exist = inspector.get_table_names()
from app.models import ExperimentParameters # adjust the import as needed
inspector = reflection.Inspector.from_engine(engine)
tables_exist = inspector.get_table_names()
if ExperimentParameters.__tablename__ not in tables_exist:
print("Creating missing table: ExperimentParameters")
ExperimentParameters.__table__.create(bind=engine)
# from app.models import ExperimentParameters # adjust the import as needed
#
# inspector = reflection.Inspector.from_engine(engine)
# tables_exist = inspector.get_table_names()
#
# if ExperimentParameters.__tablename__ not in tables_exist:
# print("Creating missing table: ExperimentParameters")
# ExperimentParameters.__table__.create(bind=engine)
#
# Ensure the production database is initialized
if not tables_exist:
print("Production database is empty. Initializing...")
@ -156,8 +156,8 @@ def on_startup():
load_slots_data(db)
else: # dev or test environments
print(f"{environment.capitalize()} environment: Regenerating database.")
# Base.metadata.drop_all(bind=engine)
# Base.metadata.create_all(bind=engine)
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
# from sqlalchemy.engine import reflection
# from app.models import ExperimentParameters # adjust the import as needed
# inspector = reflection.Inspector.from_engine(engine)

View File

@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "aareDB"
version = "0.1.0a24"
version = "0.1.0a25"
description = "Backend for next gen sample management system"
authors = [{name = "Guillaume Gotthard", email = "guillaume.gotthard@psi.ch"}]
license = {text = "MIT"}

View File

@ -59,8 +59,8 @@ const SampleTracker: React.FC<SampleTrackerProps> = ({ activePgroup }) => {
const getSampleColor = (events: Event[] = []) => {
const hasMounted = events.some((e) => e.event_type === 'Mounted');
const hasUnmounted = events.some((e) => e.event_type === 'Unmounted');
const hasMounted = events.some((e) => e.event_type === 'Mounting');
const hasUnmounted = events.some((e) => e.event_type === 'Unmounting');
const hasLost = events.some((e) => e.event_type === 'Lost');
const hasFailed = events.some((e) => e.event_type === 'Failed');

View File

@ -1,7 +1,7 @@
#!/bin/bash
# Extract values from pyproject.toml
PYPROJECT_FILE="$(dirname "$0")/pyproject.toml"
PYPROJECT_FILE="$(dirname "$0")/backend/pyproject.toml"
NAME=$(awk -F'= ' '/^name/ { print $2 }' "$PYPROJECT_FILE" | tr -d '"')
VERSION=$(awk -F'= ' '/^version/ { print $2 }' "$PYPROJECT_FILE" | tr -d '"')

View File

@ -3,8 +3,8 @@
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-03-04T09:22:38.371102Z",
"start_time": "2025-03-04T09:22:37.939673Z"
"end_time": "2025-03-11T15:04:51.407218Z",
"start_time": "2025-03-11T15:04:51.403058Z"
}
},
"cell_type": "code",
@ -41,12 +41,12 @@
"name": "stdout",
"output_type": "stream",
"text": [
"0.1.0a24\n",
"0.1.0a25\n",
"https://127.0.0.1:8000\n"
]
}
],
"execution_count": 1
"execution_count": 48
},
{
"metadata": {},
@ -328,8 +328,8 @@
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-03-03T10:06:39.979358Z",
"start_time": "2025-03-03T10:06:39.957359Z"
"end_time": "2025-03-07T12:40:09.769132Z",
"start_time": "2025-03-07T12:40:09.752103Z"
}
},
"cell_type": "code",
@ -369,48 +369,6 @@
],
"id": "51578d944878db6a",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Puck ID: 6, Puck Name: PUCK006\n",
" Sample ID: 44, Sample Name: Sample044, Position: 2, Mount count: 1\n",
" Sample ID: 45, Sample Name: Sample045, Position: 3, Mount count: 0\n",
" Sample ID: 46, Sample Name: Sample046, Position: 4, Mount count: 0\n",
" Sample ID: 47, Sample Name: Sample047, Position: 5, Mount count: 1\n",
"Puck ID: 3, Puck Name: PUCK003\n",
" Sample ID: 24, Sample Name: Sample024, Position: 1, Mount count: 0\n",
" Sample ID: 25, Sample Name: Sample025, Position: 5, Mount count: 1\n",
" Sample ID: 26, Sample Name: Sample026, Position: 8, Mount count: 1\n",
" Sample ID: 27, Sample Name: Sample027, Position: 11, Mount count: 1\n",
" Sample ID: 28, Sample Name: Sample028, Position: 12, Mount count: 1\n",
"Puck ID: 2, Puck Name: PUCK002\n",
" Sample ID: 17, Sample Name: Sample017, Position: 4, Mount count: 1\n",
" Sample ID: 18, Sample Name: Sample018, Position: 5, Mount count: 0\n",
" Sample ID: 19, Sample Name: Sample019, Position: 7, Mount count: 1\n",
" Sample ID: 20, Sample Name: Sample020, Position: 10, Mount count: 0\n",
" Sample ID: 21, Sample Name: Sample021, Position: 11, Mount count: 1\n",
" Sample ID: 22, Sample Name: Sample022, Position: 13, Mount count: 0\n",
" Sample ID: 23, Sample Name: Sample023, Position: 16, Mount count: 1\n",
"Puck ID: 1, Puck Name: PUCK-001\n",
" Sample ID: 1, Sample Name: Sample001, Position: 1, Mount count: 1\n",
" Sample ID: 2, Sample Name: Sample002, Position: 2, Mount count: 1\n",
" Sample ID: 3, Sample Name: Sample003, Position: 3, Mount count: 0\n",
" Sample ID: 4, Sample Name: Sample004, Position: 4, Mount count: 0\n",
" Sample ID: 5, Sample Name: Sample005, Position: 5, Mount count: 0\n",
" Sample ID: 6, Sample Name: Sample006, Position: 6, Mount count: 1\n",
" Sample ID: 7, Sample Name: Sample007, Position: 7, Mount count: 0\n",
" Sample ID: 8, Sample Name: Sample008, Position: 8, Mount count: 1\n",
" Sample ID: 9, Sample Name: Sample009, Position: 9, Mount count: 1\n",
" Sample ID: 10, Sample Name: Sample010, Position: 10, Mount count: 1\n",
" Sample ID: 11, Sample Name: Sample011, Position: 11, Mount count: 1\n",
" Sample ID: 12, Sample Name: Sample012, Position: 12, Mount count: 1\n",
" Sample ID: 13, Sample Name: Sample013, Position: 13, Mount count: 0\n",
" Sample ID: 14, Sample Name: Sample014, Position: 14, Mount count: 1\n",
" Sample ID: 15, Sample Name: Sample015, Position: 15, Mount count: 0\n",
" Sample ID: 16, Sample Name: Sample016, Position: 16, Mount count: 1\n"
]
},
{
"name": "stderr",
"output_type": "stream",
@ -420,13 +378,13 @@
]
}
],
"execution_count": 3
"execution_count": 2
},
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-02-26T12:05:03.257159Z",
"start_time": "2025-02-26T12:05:03.232846Z"
"end_time": "2025-03-07T20:50:06.182786Z",
"start_time": "2025-03-07T20:50:06.165153Z"
}
},
"cell_type": "code",
@ -438,12 +396,12 @@
"with aareDBclient.ApiClient(configuration) as api_client:\n",
" # Instance of the API client\n",
" api_instance = aareDBclient.SamplesApi(api_client)\n",
"\n",
" sample_id=258\n",
" try:\n",
" # Define the payload with only `event_type`\n",
" sample_event_create = SampleEventCreate(\n",
" sample_id=16,\n",
" event_type=\"Mounted\" # Valid event type\n",
" sample_id=sample_id,\n",
" event_type=\"Centering\" # Valid event type\n",
" )\n",
"\n",
" # Debug the payload before sending\n",
@ -452,7 +410,7 @@
"\n",
" # Call the API\n",
" api_response = api_instance.create_sample_event_samples_samples_sample_id_events_post(\n",
" sample_id=16, # Ensure this matches a valid sample ID in the database\n",
" sample_id=sample_id, # Ensure this matches a valid sample ID in the database\n",
" sample_event_create=sample_event_create\n",
" )\n",
"\n",
@ -475,20 +433,20 @@
"output_type": "stream",
"text": [
"Payload being sent to API:\n",
"{\"event_type\":\"Mounted\"}\n",
"{\"event_type\":\"Centering\"}\n",
"API response:\n",
"('id', 16)\n",
"('sample_name', 'Sample016')\n",
"('position', 16)\n",
"('puck_id', 1)\n",
"('id', 258)\n",
"('sample_name', 'Sample258')\n",
"('position', 14)\n",
"('puck_id', 26)\n",
"('crystalname', None)\n",
"('proteinname', None)\n",
"('positioninpuck', None)\n",
"('priority', None)\n",
"('comments', None)\n",
"('data_collection_parameters', None)\n",
"('events', [SampleEventResponse(id=399, sample_id=16, event_type='Mounted', timestamp=datetime.datetime(2025, 2, 26, 13, 5, 3))])\n",
"('mount_count', 1)\n",
"('events', [SampleEventResponse(id=492, sample_id=258, event_type='Mounting', timestamp=datetime.datetime(2025, 3, 6, 13, 50)), SampleEventResponse(id=493, sample_id=258, event_type='Unmounting', timestamp=datetime.datetime(2025, 3, 6, 13, 50, 50)), SampleEventResponse(id=573, sample_id=258, event_type='Centering', timestamp=datetime.datetime(2025, 3, 7, 21, 50, 6))])\n",
"('mount_count', 0)\n",
"('unmount_count', 0)\n"
]
},
@ -501,7 +459,7 @@
]
}
],
"execution_count": 80
"execution_count": 46
},
{
"metadata": {},
@ -528,33 +486,45 @@
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-02-26T16:15:33.052345Z",
"start_time": "2025-02-26T16:15:33.022632Z"
"end_time": "2025-03-07T20:51:14.203996Z",
"start_time": "2025-03-07T20:51:14.134135Z"
}
},
"cell_type": "code",
"source": [
"# post images to sample database\n",
"# Post multiple images to the sample database\n",
"\n",
"import os\n",
"import mimetypes\n",
"import requests\n",
"\n",
"# File path to the image\n",
"file_path = \"backend/tests/sample_image/IMG_1942.jpg\"\n",
"filename = os.path.basename(file_path)\n",
"mime_type, _ = mimetypes.guess_type(file_path)\n",
"if mime_type is None:\n",
" mime_type = \"application/octet-stream\"\n",
"# List of file paths to the images you want to upload\n",
"file_paths = [\n",
" \"backend/tests/sample_image/0_200.jpg\",\n",
" \"backend/tests/sample_image/90_200.jpg\",\n",
" \"backend/tests/sample_image/0_700.jpg\",\n",
" \"backend/tests/sample_image/90_700.jpg\",\n",
"]\n",
"\n",
"#file_paths = [\"backend/tests/sample_image/mount.jpeg.jpg\"]\n",
"\n",
"\n",
"# Sample ID (ensure this exists on your backend)\n",
"sample_id = 16\n",
"sample_id = 299\n",
"\n",
"# Build the URL for the upload endpoint.\n",
"# Base URL for the upload endpoint\n",
"url = f\"https://127.0.0.1:8000/samples/{sample_id}/upload-images\"\n",
"\n",
"# Open the file and construct the files dictionary\n",
"with open(file_path, \"rb\") as file_data:\n",
"# Iterate through each file and upload it\n",
"for file_path in file_paths:\n",
" # Determine file name and MIME type\n",
" filename = os.path.basename(file_path)\n",
" mime_type, _ = mimetypes.guess_type(file_path)\n",
" if mime_type is None:\n",
" mime_type = \"application/octet-stream\"\n",
"\n",
" # Open the file for uploading\n",
" with open(file_path, \"rb\") as file_data:\n",
" files = {\n",
" # Use key \"uploaded_file\" as required by your API\n",
" \"uploaded_file\": (filename, file_data, mime_type)\n",
@ -562,17 +532,17 @@
" headers = {\n",
" \"accept\": \"application/json\"\n",
" }\n",
" comment = \"before loop centering\"\n",
"\n",
" # Set verify=False to bypass certificate verification (only use in development)\n",
" # Send the POST request\n",
"    print(f\"Uploading {filename}...\")\n",
" response = requests.post(url, headers=headers, files=files, verify=False)\n",
"\n",
"# Check the API response\n",
"print(\"API Response:\")\n",
"print(response.status_code)\n",
"try:\n",
" # Check the API response\n",
"    print(f\"API Response for {filename}:\")\n",
" print(response.status_code)\n",
" try:\n",
" print(response.json())\n",
"except Exception:\n",
" except Exception:\n",
" print(response.text)\n"
],
"id": "11f62976d2e7d9b1",
@ -581,21 +551,40 @@
"name": "stdout",
"output_type": "stream",
"text": [
"API Response:\n",
"Uploading 0_200.jpg...\n",
"API Response for 0_200.jpg:\n",
"200\n",
"{'pgroup': 'p20001', 'sample_id': 16, 'filepath': 'images/p20001/2025-02-26/Dewar One/PUCK-001/16/IMG_1942.jpg', 'status': 'active', 'comment': None, 'id': 4}\n"
"{'pgroup': 'p20003', 'sample_id': 299, 'sample_event_id': 572, 'filepath': 'images/p20003/2025-03-07/Dewar Five/PKK007/16/Mounting_2025-03-06_14-31-00/0_200.jpg', 'status': 'active', 'comment': None, 'id': 2}\n",
"Uploading 90_200.jpg...\n",
"API Response for 90_200.jpg:\n",
"200\n",
"{'pgroup': 'p20003', 'sample_id': 299, 'sample_event_id': 572, 'filepath': 'images/p20003/2025-03-07/Dewar Five/PKK007/16/Mounting_2025-03-06_14-31-00/90_200.jpg', 'status': 'active', 'comment': None, 'id': 3}\n",
"Uploading 0_700.jpg...\n",
"API Response for 0_700.jpg:\n",
"200\n",
"{'pgroup': 'p20003', 'sample_id': 299, 'sample_event_id': 572, 'filepath': 'images/p20003/2025-03-07/Dewar Five/PKK007/16/Mounting_2025-03-06_14-31-00/0_700.jpg', 'status': 'active', 'comment': None, 'id': 4}\n",
"Uploading 90_700.jpg...\n",
"API Response for 90_700.jpg:\n",
"200\n",
"{'pgroup': 'p20003', 'sample_id': 299, 'sample_event_id': 572, 'filepath': 'images/p20003/2025-03-07/Dewar Five/PKK007/16/Mounting_2025-03-06_14-31-00/90_700.jpg', 'status': 'active', 'comment': None, 'id': 5}\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1097: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
" warnings.warn(\n",
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1097: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
" warnings.warn(\n",
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1097: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
" warnings.warn(\n",
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1097: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
" warnings.warn(\n"
]
}
],
"execution_count": 89
"execution_count": 47
},
{
"metadata": {},
@ -608,8 +597,8 @@
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-03-04T09:17:34.613556Z",
"start_time": "2025-03-04T09:17:34.599367Z"
"end_time": "2025-03-11T15:05:58.348843Z",
"start_time": "2025-03-11T15:05:58.331240Z"
}
},
"cell_type": "code",
@ -624,27 +613,27 @@
")\n",
"\n",
"\n",
"## Build the nested parameters\n",
"#rotation = RotationParameters(\n",
"# omegaStart_deg=0.0,\n",
"# omegaStep=90.0,\n",
"# phi=10.0,\n",
"# chi=0.0,\n",
"# numberOfImages=4,\n",
"# exposureTime_s=0.02\n",
"#)\n",
"\n",
"gridscan = GridScanParamers(\n",
" xStart=0.0,\n",
" xStep=0.1,\n",
" yStart=0.0,\n",
" yStep= 0.1,\n",
" zStart=0.0,\n",
" zStep=0.0,\n",
" numberOfImages=4600,\n",
" exposureTime_s=0.001\n",
"# Build the nested parameters\n",
"rotation = RotationParameters(\n",
" omegaStart_deg=0.0,\n",
" omegaStep=90.0,\n",
" phi=10.0,\n",
" chi=0.0,\n",
" numberOfImages=4,\n",
" exposureTime_s=0.02\n",
")\n",
"\n",
"#gridscan = GridScanParamers(\n",
"# xStart=0.0,\n",
"# xStep=0.1,\n",
"# yStart=0.0,\n",
"# yStep= 0.1,\n",
"# zStart=0.0,\n",
"# zStep=0.0,\n",
"# numberOfImages=4600,\n",
"# exposureTime_s=0.001\n",
"#)\n",
"\n",
"# If your client code requires you to build a detector model,\n",
"# you can either use a Detector model or pass a dictionary.\n",
"# Here we pass a dictionary.\n",
@ -673,8 +662,8 @@
" beamlineFluxAtSample_ph_s=0,\n",
" beamSizeWidth=30.0,\n",
" beamSizeHeight=30.0,\n",
"# rotation=rotation # Optional nested parameter\n",
" gridScan=gridscan\n",
" rotation=rotation # Optional nested parameter\n",
" #gridScan=gridscan\n",
" # gridScan and jet are optional and can be added similarly\n",
")\n",
"\n",
@ -684,7 +673,7 @@
"experiment_params_payload = ExperimentParametersCreate(\n",
" # run_number can be omitted/ignored if computed on the server\n",
" beamline_parameters=beamline_params,\n",
" sample_id=15 # change sample_id to an existing sample in your database\n",
" sample_id=299 # change sample_id to an existing sample in your database\n",
")\n",
"\n",
"# Now, use the API instance to send the POST request\n",
@ -711,7 +700,7 @@
"output_type": "stream",
"text": [
"API Response:\n",
"run_number=3 beamline_parameters=BeamlineParameters(synchrotron='Swiss Light Source', beamline='PXIII', detector=Detector(manufacturer='DECTRIS', model='PILATUS4 2M', type='photon-counting', serial_number='16684dscsd668468', detector_distance_mm=95.0, beam_center_x_px=512.0, beam_center_y_px=512.0, pixel_size_x_um=150.0, pixel_size_y_um=150.0), wavelength=1.0, ring_current_a=0.0, ring_mode='Machine Down', undulator=None, undulatorgap_mm=None, monochromator='Si111', transmission=1.0, focusing_optic='Kirkpatrick-Baez', beamline_flux_at_sample_ph_s=0.0, beam_size_width=30.0, beam_size_height=30.0, rotation=None, grid_scan=GridScanParamers(x_start=0.0, x_step=0.1, y_start=0.0, y_step=0.1, z_start=0.0, z_step=0.0, number_of_images=4600, exposure_time_s=0.001), jet=None, cryojet_temperature_k=None, humidifier_temperature_k=None, humidifier_humidity=None) sample_id=15 id=10\n"
"run_number=1 beamline_parameters=BeamlineParameters(synchrotron='Swiss Light Source', beamline='PXIII', detector=Detector(manufacturer='DECTRIS', model='PILATUS4 2M', type='photon-counting', serial_number='16684dscsd668468', detector_distance_mm=95.0, beam_center_x_px=512.0, beam_center_y_px=512.0, pixel_size_x_um=150.0, pixel_size_y_um=150.0), wavelength=1.0, ring_current_a=0.0, ring_mode='Machine Down', undulator=None, undulatorgap_mm=None, monochromator='Si111', transmission=1.0, focusing_optic='Kirkpatrick-Baez', beamline_flux_at_sample_ph_s=0.0, beam_size_width=30.0, beam_size_height=30.0, rotation=RotationParameters(omega_start_deg=0.0, omega_step=90.0, chi=0.0, phi=10.0, number_of_images=4, exposure_time_s=0.02), grid_scan=None, jet=None, cryojet_temperature_k=None, humidifier_temperature_k=None, humidifier_humidity=None) sample_id=299 id=1\n"
]
},
{
@ -723,7 +712,7 @@
]
}
],
"execution_count": 12
"execution_count": 49
},
{
"metadata": {