Add dataset, slurm_id, and FAILED status to models

Enhanced the models with new fields: a dataset field on ExperimentParameters and a slurm_id on Jobs. Introduced a FAILED status in the JobStatus enum. Added an update-dataset endpoint that stores a run's dataset and creates a processing job when the dataset status is "written".
GotthardG
2025-04-29 22:52:36 +02:00
parent 9af2e84f9e
commit 57de665c7b
8 changed files with 202 additions and 55 deletions
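A minimal client-side sketch of the new workflow, assuming the endpoint path and payload shape shown in the diffs below (PATCH /samples/update-dataset/{sample_id}/{run_id} with a Datasets body); the base URL, filepath, and IDs are illustrative:

import requests
from datetime import datetime

BASE_URL = "https://localhost:8000"  # illustrative dev host, as in the test notebook below
sample_id, run_id = 230, 1           # illustrative IDs

# Marking the run's dataset as "written" stores it on the ExperimentParameters row
# and creates a TODO job for this sample/run if one does not exist yet.
dataset_payload = {
    "filepath": "/path/to/collected/dataset",   # illustrative path
    "status": "written",
    "written_at": datetime.now().isoformat(),
}
resp = requests.patch(
    f"{BASE_URL}/samples/update-dataset/{sample_id}/{run_id}",
    json=dataset_payload,
    verify=False,  # the dev setup below uses a self-signed certificate
)
resp.raise_for_status()
print(resp.json())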

View File

@@ -273,6 +273,7 @@ class ExperimentParameters(Base):
id = Column(Integer, primary_key=True, index=True, autoincrement=True)
run_number = Column(Integer, nullable=False)
beamline_parameters = Column(JSON, nullable=True)
dataset = Column(JSON, nullable=True)
sample_id = Column(Integer, ForeignKey("samples.id"), nullable=False)
@@ -311,6 +312,7 @@ class JobStatus(str, enum.Enum):
TODO = "todo"
SUBMITTED = "submitted"
DONE = "done"
FAILED = "failed"
class Jobs(Base):
@@ -323,3 +325,4 @@ class Jobs(Base):
experiment_parameters = relationship(ExperimentParameters)
created_at = Column(DateTime, server_default=func.now())
updated_at = Column(DateTime, onupdate=func.now())
slurm_id = Column(Integer, nullable=True)

View File

@@ -41,6 +41,7 @@ async def job_event_generator(db: Session):
experiment_parameters=experiment.beamline_parameters
if experiment
else None,
slurm_id=None,
)
job_items.append(job_item)
@@ -75,13 +76,14 @@ def update_jobs_status(payload: JobsUpdate, db: Session = Depends(get_db)):
# Update the status
job.status = payload.status
job.slurm_id = payload.slurm_id
# Optionally update 'updated_at'
from sqlalchemy import func
from datetime import datetime
job.updated_at = func.now()
job.updated_at = datetime.now()
db.commit()
db.refresh(job)
# Return the updated job's info as response
return JobsUpdate(job_id=job.id, status=job.status)
return JobsUpdate(job_id=job.id, status=job.status, slurm_id=job.slurm_id)

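On the processing side, a hedged sketch of how a client might report a SLURM outcome using the extended JobsUpdate payload (job_id, status, slurm_id) and the new "failed" status; the URL matches the test notebook further down, and the helper name and IDs are illustrative:

import requests

UPDATE_URL = "https://127.0.0.1:8000/processing/jobs/update_status"  # as used in the notebook below

def report_job_result(job_id: int, slurm_id: int, succeeded: bool) -> dict:
    # JobsUpdate now requires job_id, status, and slurm_id (see the schemas diff below).
    payload = {
        "job_id": job_id,
        "status": "done" if succeeded else "failed",  # "failed" maps to the new JobStatus.FAILED
        "slurm_id": slurm_id,
    }
    resp = requests.post(UPDATE_URL, json=payload, verify=False)  # self-signed cert in the dev setup
    resp.raise_for_status()
    return resp.json()

# Illustrative usage:
# report_job_result(job_id=1, slurm_id=76545678, succeeded=False)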
View File

@@ -1,4 +1,5 @@
from fastapi import APIRouter, HTTPException, Depends, UploadFile, File, Form
from fastapi.encoders import jsonable_encoder
from sqlalchemy.orm import Session
from pathlib import Path
from typing import List
@@ -18,6 +19,7 @@ from app.schemas import (
ResultResponse,
ResultCreate,
Results as ProcessingResults,
Datasets,
)
from app.models import (
Puck as PuckModel,
@@ -350,19 +352,60 @@ def create_experiment_parameters_for_sample(
db.add(new_event)
db.commit()
new_job = JobModel(
sample_id=sample_id,
run_id=new_exp.id,
experiment_parameters=new_exp, # not sure yet
status=JobStatus.TODO,
)
db.add(new_job)
db.commit()
db.refresh(new_job)
return new_exp
@router.patch(
"/update-dataset/{sample_id}/{run_id}",
response_model=ExperimentParametersRead,
operation_id="update_dataset_for_experiment_run",
)
def update_experiment_run_dataset(
sample_id: int,
run_id: int,
dataset: Datasets,
db: Session = Depends(get_db),
):
# Find the run for this sample and run_id
exp = (
db.query(ExperimentParametersModel)
.filter(
ExperimentParametersModel.sample_id == sample_id,
ExperimentParametersModel.id == run_id,
)
.first()
)
if not exp:
raise HTTPException(
status_code=404,
detail="ExperimentParameters (run) not found for this sample",
)
exp.dataset = jsonable_encoder(dataset)
db.commit()
db.refresh(exp)
# Only create a job if status is "written" and job does not exist yet
if dataset.status == "written":
job_exists = (
db.query(JobModel)
.filter(JobModel.sample_id == sample_id, JobModel.run_id == run_id)
.first()
)
if not job_exists:
new_job = JobModel(
sample_id=sample_id,
run_id=run_id,
experiment_parameters=exp, # adjust this line as appropriate
status=JobStatus.TODO,
)
db.add(new_job)
db.commit()
db.refresh(new_job)
return exp
@router.post(
"/processing-results", response_model=ResultResponse, operation_id="create_result"
)

View File

@@ -898,9 +898,16 @@ class BeamlineParameters(BaseModel):
# beamstopDiameter_mm: Optional[float] = None
class Datasets(BaseModel):
filepath: str
status: str
written_at: datetime
class ExperimentParametersBase(BaseModel):
run_number: int
beamline_parameters: Optional[BeamlineParameters] = None
dataset: Optional[Datasets] = None
sample_id: int
@@ -915,6 +922,12 @@ class ExperimentParametersRead(ExperimentParametersBase):
from_attributes = True
class ExperimentParametersUpdate(BaseModel):
run_number: int
dataset: Optional[Datasets]
sample_id: int
class SampleResult(BaseModel):
sample_id: int
sample_name: str
@@ -952,6 +965,7 @@ class JobsCreate(BaseModel):
created_at: datetime
updated_at: datetime
experiment_parameters: BeamlineParameters
slurm_id: Optional[int] = None
class Config:
from_attributes = True
@@ -968,8 +982,10 @@ class JobsResponse(BaseModel):
updated_at: Optional[datetime]
data_collection_parameters: Optional[DataCollectionParameters] = None
experiment_parameters: BeamlineParameters
slurm_id: Optional[int] = None
class JobsUpdate(BaseModel):
job_id: int
status: str
slurm_id: int

View File

@@ -154,8 +154,8 @@ async def lifespan(app: FastAPI):
load_slots_data(db)
else: # dev or test environments
print(f"{environment.capitalize()} environment: Regenerating database.")
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
# Base.metadata.drop_all(bind=engine)
# Base.metadata.create_all(bind=engine)
# from sqlalchemy.engine import reflection
# from app.models import ExperimentParameters # adjust the import as needed
# inspector = reflection.Inspector.from_engine(engine)

View File

@@ -9,7 +9,7 @@
"is_executing": true
},
"ExecuteTime": {
"start_time": "2025-04-29T12:21:15.813326Z"
"start_time": "2025-04-29T20:30:28.354331Z"
}
},
"source": [
@@ -20,10 +20,11 @@
"SSE_URL = \"https://127.0.0.1:8000/processing/jobs/stream\"\n",
"UPDATE_URL = \"https://127.0.0.1:8000/processing/jobs/update_status\"\n",
"\n",
"def submit_job_update(job_id, status):\n",
"def submit_job_update(job_id, status, slurm_id):\n",
" payload = {\n",
" \"job_id\": job_id,\n",
" \"status\": status\n",
" \"status\": status,\n",
" \"slurm_id\": slurm_id,\n",
" }\n",
" try:\n",
" response = requests.post(UPDATE_URL, json=payload, verify=False)\n",
@@ -48,11 +49,11 @@
" jobs = json.loads(event.data)\n",
" print(f\"Jobs received: {jobs}\")\n",
"\n",
" #for job in jobs:\n",
" # job_id = job.get(\"job_id\")\n",
" # print(f\"Job ID: {job_id}, Current status: {job.get('status')}\")\n",
" # # Immediately update status to \"submitted\"\n",
" # submit_job_update(job_id, \"submitted\")\n",
" for job in jobs:\n",
" job_id = job.get(\"job_id\")\n",
" print(f\"Job ID: {job_id}, Current status: {job.get('status')}\")\n",
" # Immediately update status to \"submitted\"\n",
" submit_job_update(job_id, \"submitted\", 76545678)\n",
" except json.JSONDecodeError as e:\n",
" print(f\"Error decoding event data: {e}\")\n",
" except Exception as e:\n",
@@ -66,13 +67,18 @@
"name": "stdout",
"output_type": "stream",
"text": [
"Starting job status updater...\n"
"Starting job status updater...\n",
"Jobs received: [{'job_id': 1, 'sample_id': 230, 'run_id': 1, 'sample_name': 'Sample230', 'status': 'todo', 'type': 'default_type', 'created_at': '2025-04-29T20:25:20.888394', 'updated_at': None, 'data_collection_parameters': None, 'experiment_parameters': {'synchrotron': 'Swiss Light Source', 'beamline': 'PXIII', 'detector': {'manufacturer': 'DECTRIS', 'model': 'PILATUS4 2M', 'type': 'photon-counting', 'serialNumber': '16684dscsd668468', 'detectorDistance_mm': 95.0, 'beamCenterX_px': 512.0, 'beamCenterY_px': 512.0, 'pixelSizeX_um': 150.0, 'pixelSizeY_um': 150.0}, 'wavelength': 1.0, 'ringCurrent_A': 0.0, 'ringMode': 'Machine Down', 'undulator': None, 'undulatorgap_mm': None, 'monochromator': 'Si111', 'transmission': 1.0, 'focusingOptic': 'Kirkpatrick-Baez', 'beamlineFluxAtSample_ph_s': 0.0, 'beamSizeWidth': 30.0, 'beamSizeHeight': 30.0, 'characterization': None, 'rotation': {'omegaStart_deg': 0.0, 'omegaStep': 0.1, 'chi': 0.0, 'phi': 10.0, 'numberOfImages': 3600, 'exposureTime_s': 0.02}, 'gridScan': None, 'jet': None, 'cryojetTemperature_K': None, 'humidifierTemperature_K': None, 'humidifierHumidity': None}, 'slurm_id': None}]\n",
"Job ID: 1, Current status: todo\n",
"✅ Job 1 status updated to 'submitted'. Response: {'job_id': 1, 'status': 'submitted', 'slurm_id': 76545678}\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1103: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
" warnings.warn(\n",
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1103: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
" warnings.warn(\n"
]

View File

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "aareDB"
version = "0.1.1a1"
version = "0.1.1a2"
description = "Backend for next gen sample management system"
authors = [{name = "Guillaume Gotthard", email = "guillaume.gotthard@psi.ch"}]
license = {text = "MIT"}

View File

@@ -3,8 +3,8 @@
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-29T11:59:40.825956Z",
"start_time": "2025-04-29T11:59:40.822492Z"
"end_time": "2025-04-29T20:17:17.478562Z",
"start_time": "2025-04-29T20:17:16.842590Z"
}
},
"cell_type": "code",
@@ -43,12 +43,12 @@
"name": "stdout",
"output_type": "stream",
"text": [
"0.1.1a1\n",
"0.1.1a2\n",
"https://localhost:8000\n"
]
}
],
"execution_count": 31
"execution_count": 1
},
{
"metadata": {},
@@ -427,21 +427,21 @@
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-29T12:00:18.905592Z",
"start_time": "2025-04-29T12:00:18.903773Z"
"end_time": "2025-04-29T20:17:43.335654Z",
"start_time": "2025-04-29T20:17:43.333828Z"
}
},
"cell_type": "code",
"source": "sample_id = 250",
"source": "sample_id = 230",
"id": "54d4d46ca558e7b9",
"outputs": [],
"execution_count": 32
"execution_count": 2
},
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-29T12:00:20.537319Z",
"start_time": "2025-04-29T12:00:20.510157Z"
"end_time": "2025-04-29T20:17:46.414835Z",
"start_time": "2025-04-29T20:17:46.379138Z"
}
},
"cell_type": "code",
@@ -493,7 +493,7 @@
"DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): localhost:8000\n",
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1103: InsecureRequestWarning: Unverified HTTPS request is being made to host 'localhost'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
" warnings.warn(\n",
"DEBUG:urllib3.connectionpool:https://localhost:8000 \"POST /samples/samples/250/events HTTP/1.1\" 200 412\n"
"DEBUG:urllib3.connectionpool:https://localhost:8000 \"POST /samples/samples/230/events HTTP/1.1\" 200 413\n"
]
},
{
@@ -503,9 +503,9 @@
"Payload being sent to API:\n",
"{\"event_type\":\"Collecting\"}\n",
"API response:\n",
"('id', 250)\n",
"('sample_name', 'Sample250')\n",
"('position', 9)\n",
"('id', 230)\n",
"('sample_name', 'Sample230')\n",
"('position', 16)\n",
"('puck_id', 30)\n",
"('crystalname', None)\n",
"('proteinname', None)\n",
@@ -513,19 +513,22 @@
"('priority', None)\n",
"('comments', None)\n",
"('data_collection_parameters', None)\n",
"('events', [SampleEventResponse(event_type='Mounting', id=490, sample_id=250, timestamp=datetime.datetime(2025, 4, 28, 13, 42)), SampleEventResponse(event_type='Collecting', id=491, sample_id=250, timestamp=datetime.datetime(2025, 4, 29, 12, 0, 20, 528955))])\n",
"('events', [SampleEventResponse(event_type='Mounting', id=453, sample_id=230, timestamp=datetime.datetime(2025, 4, 28, 13, 22)), SampleEventResponse(event_type='Collecting', id=454, sample_id=230, timestamp=datetime.datetime(2025, 4, 29, 20, 17, 46, 405264))])\n",
"('mount_count', 0)\n",
"('unmount_count', 0)\n"
]
}
],
"execution_count": 33
"execution_count": 3
},
{
"metadata": {},
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-29T14:27:46.730515Z",
"start_time": "2025-04-29T14:27:46.622922Z"
}
},
"cell_type": "code",
"outputs": [],
"execution_count": null,
"source": [
"### not working\n",
"with aareDBclient.ApiClient(configuration) as api_client:\n",
@@ -541,13 +544,27 @@
" except ApiException as e:\n",
" print(\"Exception when calling get_last_sample_event: %s\\n\" % e)\n"
],
"id": "f1d171700d6cf7fe"
"id": "f1d171700d6cf7fe",
"outputs": [
{
"ename": "AttributeError",
"evalue": "'SamplesApi' object has no attribute 'get_last_sample_event_samples_samples_sample_id_events_last_get'",
"output_type": "error",
"traceback": [
"\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
"\u001B[0;31mAttributeError\u001B[0m Traceback (most recent call last)",
"Cell \u001B[0;32mIn[48], line 8\u001B[0m\n\u001B[1;32m 4\u001B[0m api_instance \u001B[38;5;241m=\u001B[39m aareDBclient\u001B[38;5;241m.\u001B[39mSamplesApi(api_client)\n\u001B[1;32m 6\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[1;32m 7\u001B[0m \u001B[38;5;66;03m# Get the last sample event\u001B[39;00m\n\u001B[0;32m----> 8\u001B[0m last_event_response \u001B[38;5;241m=\u001B[39m \u001B[43mapi_instance\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mget_last_sample_event_samples_samples_sample_id_events_last_get\u001B[49m(\u001B[38;5;241m27\u001B[39m)\n\u001B[1;32m 9\u001B[0m \u001B[38;5;28mprint\u001B[39m(\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mThe response of get_last_sample_event:\u001B[39m\u001B[38;5;130;01m\\n\u001B[39;00m\u001B[38;5;124m\"\u001B[39m)\n\u001B[1;32m 10\u001B[0m pprint(last_event_response)\n",
"\u001B[0;31mAttributeError\u001B[0m: 'SamplesApi' object has no attribute 'get_last_sample_event_samples_samples_sample_id_events_last_get'"
]
}
],
"execution_count": 48
},
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-29T12:21:22.613961Z",
"start_time": "2025-04-29T12:21:22.585856Z"
"end_time": "2025-04-29T20:17:52.059793Z",
"start_time": "2025-04-29T20:17:51.939047Z"
}
},
"cell_type": "code",
@@ -618,7 +635,7 @@
"DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): 127.0.0.1:8000\n",
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1103: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
" warnings.warn(\n",
"DEBUG:urllib3.connectionpool:https://127.0.0.1:8000 \"POST /samples/250/upload-images HTTP/1.1\" 200 204\n"
"DEBUG:urllib3.connectionpool:https://127.0.0.1:8000 \"POST /samples/230/upload-images HTTP/1.1\" 200 205\n"
]
},
{
@@ -628,11 +645,11 @@
"Uploading after_dc.jpeg.jpg...\n",
"API Response for after_dc.jpeg.jpg:\n",
"200\n",
"{'pgroup': 'p20003', 'sample_id': 250, 'sample_event_id': 492, 'filepath': 'images/p20003/2025-04-29/Dewar Five/PKK007/9/Collecting_2025-04-29_12-00-30/after_dc.jpeg.jpg', 'status': 'active', 'comment': None, 'id': 2}\n"
"{'pgroup': 'p20003', 'sample_id': 230, 'sample_event_id': 454, 'filepath': 'images/p20003/2025-04-29/Dewar Five/PKK007/16/Collecting_2025-04-29_20-17-46/after_dc.jpeg.jpg', 'status': 'active', 'comment': None, 'id': 1}\n"
]
}
],
"execution_count": 36
"execution_count": 4
},
{
"metadata": {},
@@ -645,8 +662,8 @@
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-29T12:21:30.879657Z",
"start_time": "2025-04-29T12:21:30.855622Z"
"end_time": "2025-04-29T20:17:56.494919Z",
"start_time": "2025-04-29T20:17:56.469538Z"
}
},
"cell_type": "code",
@@ -761,7 +778,7 @@
"DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): localhost:8000\n",
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1103: InsecureRequestWarning: Unverified HTTPS request is being made to host 'localhost'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
" warnings.warn(\n",
"DEBUG:urllib3.connectionpool:https://localhost:8000 \"POST /samples/samples/250/experiment_parameters HTTP/1.1\" 200 860\n"
"DEBUG:urllib3.connectionpool:https://localhost:8000 \"POST /samples/samples/230/experiment_parameters HTTP/1.1\" 200 875\n"
]
},
{
@@ -769,11 +786,71 @@
"output_type": "stream",
"text": [
"API Response:\n",
"run_number=2 beamline_parameters=BeamlineParameters(synchrotron='Swiss Light Source', beamline='PXIII', detector=Detector(manufacturer='DECTRIS', model='PILATUS4 2M', type='photon-counting', serial_number='16684dscsd668468', detector_distance_mm=95.0, beam_center_x_px=512.0, beam_center_y_px=512.0, pixel_size_x_um=150.0, pixel_size_y_um=150.0), wavelength=1.0, ring_current_a=0.0, ring_mode='Machine Down', undulator=None, undulatorgap_mm=None, monochromator='Si111', transmission=1.0, focusing_optic='Kirkpatrick-Baez', beamline_flux_at_sample_ph_s=0.0, beam_size_width=30.0, beam_size_height=30.0, characterization=None, rotation=RotationParameters(omega_start_deg=0.0, omega_step=0.1, chi=0.0, phi=10.0, number_of_images=3600, exposure_time_s=0.02), grid_scan=None, jet=None, cryojet_temperature_k=None, humidifier_temperature_k=None, humidifier_humidity=None) sample_id=250 id=2\n"
"run_number=1 beamline_parameters=BeamlineParameters(synchrotron='Swiss Light Source', beamline='PXIII', detector=Detector(manufacturer='DECTRIS', model='PILATUS4 2M', type='photon-counting', serial_number='16684dscsd668468', detector_distance_mm=95.0, beam_center_x_px=512.0, beam_center_y_px=512.0, pixel_size_x_um=150.0, pixel_size_y_um=150.0), wavelength=1.0, ring_current_a=0.0, ring_mode='Machine Down', undulator=None, undulatorgap_mm=None, monochromator='Si111', transmission=1.0, focusing_optic='Kirkpatrick-Baez', beamline_flux_at_sample_ph_s=0.0, beam_size_width=30.0, beam_size_height=30.0, characterization=None, rotation=RotationParameters(omega_start_deg=0.0, omega_step=0.1, chi=0.0, phi=10.0, number_of_images=3600, exposure_time_s=0.02), grid_scan=None, jet=None, cryojet_temperature_k=None, humidifier_temperature_k=None, humidifier_humidity=None) dataset=None sample_id=230 id=1\n"
]
}
],
"execution_count": 37
"execution_count": 5
},
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-04-29T20:29:27.657874Z",
"start_time": "2025-04-29T20:29:27.623737Z"
}
},
"cell_type": "code",
"source": [
"from datetime import datetime\n",
"\n",
"sample_id = sample_id\n",
"run_id = 1\n",
"\n",
"def test_mark_run_written(sample_id, run_id, configuration):\n",
" # Prepare your dataset dict as required by your API (.dict() results if using Pydantic model)\n",
" dataset_payload = {\n",
" \"filepath\": \"/das/work/p11/p11206/raw_data/vincent/20250415_6D_SLS2_1st_data/20250415_fullbeam_dtz220_Lyso102_again_360deg\",\n",
" \"status\": \"written\",\n",
" \"written_at\": datetime.now().isoformat()\n",
" }\n",
"\n",
" # The API method and argument names may differ if autogenerated — adjust accordingly\n",
" with aareDBclient.ApiClient(configuration) as api_client:\n",
" api_instance = aareDBclient.SamplesApi(api_client)\n",
" try:\n",
" api_response = api_instance.update_dataset_for_experiment_run(\n",
" sample_id=sample_id, run_id=run_id, datasets=dataset_payload\n",
" )\n",
" print(\"Dataset updated successfully:\")\n",
" print(api_response)\n",
" except ApiException as e:\n",
" print(f\"API call failed: {e}\")\n",
"\n",
"# Usage example (replace with your actual IDs and configuration):\n",
"test_mark_run_written(sample_id, run_id, configuration)"
],
"id": "77793ecd843c1ef3",
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): localhost:8000\n",
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1103: InsecureRequestWarning: Unverified HTTPS request is being made to host 'localhost'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
" warnings.warn(\n",
"DEBUG:urllib3.connectionpool:https://localhost:8000 \"PATCH /samples/update-dataset/230/1 HTTP/1.1\" 200 1056\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Dataset updated successfully:\n",
"run_number=1 beamline_parameters=BeamlineParameters(synchrotron='Swiss Light Source', beamline='PXIII', detector=Detector(manufacturer='DECTRIS', model='PILATUS4 2M', type='photon-counting', serial_number='16684dscsd668468', detector_distance_mm=95.0, beam_center_x_px=512.0, beam_center_y_px=512.0, pixel_size_x_um=150.0, pixel_size_y_um=150.0), wavelength=1.0, ring_current_a=0.0, ring_mode='Machine Down', undulator=None, undulatorgap_mm=None, monochromator='Si111', transmission=1.0, focusing_optic='Kirkpatrick-Baez', beamline_flux_at_sample_ph_s=0.0, beam_size_width=30.0, beam_size_height=30.0, characterization=None, rotation=RotationParameters(omega_start_deg=0.0, omega_step=0.1, chi=0.0, phi=10.0, number_of_images=3600, exposure_time_s=0.02), grid_scan=None, jet=None, cryojet_temperature_k=None, humidifier_temperature_k=None, humidifier_humidity=None) dataset=Datasets(filepath='/das/work/p11/p11206/raw_data/vincent/20250415_6D_SLS2_1st_data/20250415_fullbeam_dtz220_Lyso102_again_360deg', status='written', written_at=datetime.datetime(2025, 4, 29, 22, 29, 27, 626524)) sample_id=230 id=1\n"
]
}
],
"execution_count": 13
},
{
"metadata": {