Add SetTellPositionRequest schema and minor cleanup.

Added a new `SetTellPositionRequest` schema in `schemas.py` to support bulk updates of TELL positions. Commented out redundant metadata operations in `main.py` and cleaned up unused content in the test notebook for better readability.
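For context, the endpoint now takes a single wrapped object rather than a bare list: the beamline identifier (`tell`) plus the pucks to place, and an empty `pucks` list clears every puck currently registered at that beamline. A minimal sketch of such a bulk update, assuming the router is mounted under `/pucks` and using an illustrative base URL and example puck names:

    import requests

    BASE_URL = "https://localhost:8000"  # placeholder; adjust to the actual deployment

    payload = {
        "tell": "X06DA",  # beamline alias, resolved to a slot ID on the server
        "pucks": [
            {"puck_name": "PSIMX074", "segment": "B", "puck_in_segment": 1},
            {"puck_name": "PSIMX080", "segment": "B", "puck_in_segment": 2},
        ],
    }

    # PUT /pucks/set-tell-positions with the wrapped payload
    response = requests.put(
        f"{BASE_URL}/pucks/set-tell-positions", json=payload, verify=False
    )
    print(response.json())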
GotthardG
2025-02-04 14:43:59 +01:00
parent fef9b1c618
commit 780ba1959f
4 changed files with 553 additions and 380 deletions

View File

@@ -208,6 +208,7 @@ class PuckEvent(Base):
    id = Column(Integer, primary_key=True, index=True)
    puck_id = Column(Integer, ForeignKey("pucks.id"))
+    tell = Column(String(255), nullable=True)
    tell_position = Column(String(255), nullable=True)
    event_type = Column(String(255), index=True)
    timestamp = Column(DateTime, default=datetime.now)

View File

@@ -11,14 +11,13 @@ from app.schemas import (
    PuckUpdate,
    PuckWithTellPosition,
    Sample,
-    SetTellPosition,
+    SetTellPositionRequest,
    DataCollectionParameters,
)
from app.models import (
    Puck as PuckModel,
    PuckEvent as PuckEventModel,
    Sample as SampleModel,
-    Slot as SlotModel,
    LogisticsEvent as LogisticsEventModel,
    Dewar as DewarModel,
)
@@ -30,6 +29,15 @@ router = APIRouter()
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
+
+VALID_TELL_OPTIONS = {"X06SA", "X06DA", "X10SA"}
+
+
+def validate_tell(tell: str):
+    if tell not in VALID_TELL_OPTIONS:
+        raise ValueError(
+            f"Invalid tell: {tell}. Must be one of {', '.join(VALID_TELL_OPTIONS)}"
+        )


def normalize_puck_name(name: str) -> str:
    """
@@ -39,6 +47,154 @@ def normalize_puck_name(name: str) -> str:
    return name
+
+
+def resolve_slot_id(slot_identifier: str) -> int:
+    """
+    Convert a slot identifier (either numeric or alias) to a numeric slot ID.
+
+    Args:
+        slot_identifier (str): The slot identifier to resolve (e.g., "PXI",
+        "PXII", "48").
+
+    Returns:
+        int: The numeric slot ID corresponding to the identifier.
+
+    Raises:
+        HTTPException: If the slot identifier is invalid or unrecognized.
+    """
+    # Map slot identifier keywords to numeric slot IDs
+    slot_aliases = {
+        "PXI": 47,
+        "PXII": 48,
+        "PXIII": 49,
+        "X06SA": 47,
+        "X10SA": 48,
+        "X06DA": 49,
+    }
+
+    # Try to resolve the identifier
+    try:
+        return int(slot_identifier)  # If it's a numeric slot ID, return it directly
+    except ValueError:
+        # Convert alias to slot ID using the mapping
+        slot_id = slot_aliases.get(slot_identifier.upper())
+        if slot_id:
+            return slot_id
+
+    # Log error and raise an exception for invalid identifiers
+    logger.error(f"Invalid slot identifier: {slot_identifier}")
+    raise HTTPException(
+        status_code=400, detail=f"Invalid slot identifier: {slot_identifier}"
+    )
+
+
+def get_pucks_at_beamline(slot_id: int, db: Session) -> List[PuckWithTellPosition]:
+    """
+    Fetch all pucks currently located at the beamline for a given slot ID.
+    """
+    # Subquery: Latest logistic event for each dewar
+    latest_event_subquery = (
+        db.query(
+            LogisticsEventModel.dewar_id.label("dewar_id"),
+            func.max(LogisticsEventModel.timestamp).label("latest_event_time"),
+        )
+        .group_by(LogisticsEventModel.dewar_id)
+        .subquery(name="latest_event_subquery")
+    )
+
+    # Query dewars in the slot with the latest event "beamline"
+    dewars = (
+        db.query(DewarModel)
+        .join(LogisticsEventModel, DewarModel.id == LogisticsEventModel.dewar_id)
+        .join(
+            latest_event_subquery,
+            (LogisticsEventModel.dewar_id == latest_event_subquery.c.dewar_id)
+            & (
+                LogisticsEventModel.timestamp
+                == latest_event_subquery.c.latest_event_time
+            ),
+        )
+        .filter(
+            LogisticsEventModel.slot_id == slot_id,
+            LogisticsEventModel.event_type == "beamline",
+        )
+        .all()
+    )
+
+    if not dewars:
+        logger.warning(f"No dewars found for slot ID: {slot_id}")
+        return []
+
+    # Map dewars to their details
+    dewar_ids = [dewar.id for dewar in dewars]
+    dewar_map = {dewar.id: dewar.dewar_name for dewar in dewars}
+    dewar_pgroups = {dewar.id: dewar.pgroups for dewar in dewars}
+
+    # Subquery: Latest event for each puck
+    latest_puck_event_subquery = (
+        db.query(
+            PuckEventModel.puck_id.label("puck_id"),
+            func.max(PuckEventModel.timestamp).label("latest_event_time"),
+        )
+        .group_by(PuckEventModel.puck_id)
+        .subquery(name="latest_event_subquery")
+    )
+
+    # Query pucks for the selected dewars
+    pucks_with_latest_events = (
+        db.query(
+            PuckModel,
+            PuckEventModel.event_type,
+            PuckEventModel.tell_position,
+            DewarModel,
+        )
+        .join(
+            latest_puck_event_subquery,
+            PuckModel.id == latest_puck_event_subquery.c.puck_id,
+            isouter=True,
+        )
+        .join(
+            PuckEventModel,
+            (PuckEventModel.puck_id == latest_puck_event_subquery.c.puck_id)
+            & (
+                PuckEventModel.timestamp
+                == latest_puck_event_subquery.c.latest_event_time
+            ),
+            isouter=True,
+        )
+        .join(DewarModel, PuckModel.dewar_id == DewarModel.id, isouter=True)
+        .filter(PuckModel.dewar_id.in_(dewar_ids))
+        .all()
+    )
+
+    # Prepare the results
+    results = []
+    for puck, event_type, tell_position, dewar in pucks_with_latest_events:
+        dewar_name = dewar_map.get(puck.dewar_id, "Unknown")
+        pgroup = dewar_pgroups.get(puck.dewar_id)
+
+        # For pucks with no events or whose latest event is "puck_removed", set
+        # tell_position to None
+        if event_type is None or event_type == "puck_removed":
+            tell_position = None
+
+        results.append(
+            PuckWithTellPosition(
+                id=puck.id,
+                pgroup=pgroup,
+                puck_name=puck.puck_name,
+                puck_type=puck.puck_type,
+                puck_location_in_dewar=int(puck.puck_location_in_dewar)
+                if puck.puck_location_in_dewar
+                else None,
+                dewar_id=puck.dewar_id,
+                dewar_name=dewar_name,
+                tell_position=tell_position,
+            )
+        )
+
+    return results
@router.get("/", response_model=List[PuckSchema]) @router.get("/", response_model=List[PuckSchema])
async def get_pucks(db: Session = Depends(get_db)): async def get_pucks(db: Session = Depends(get_db)):
return db.query(PuckModel).all() return db.query(PuckModel).all()
@ -46,146 +202,235 @@ async def get_pucks(db: Session = Depends(get_db)):
@router.put("/set-tell-positions", status_code=status.HTTP_200_OK) @router.put("/set-tell-positions", status_code=status.HTTP_200_OK)
async def set_tell_positions( async def set_tell_positions(
pucks: List[SetTellPosition], db: Session = Depends(get_db) payload: SetTellPositionRequest, # Accept the wrapped request as a single payload
db: Session = Depends(get_db),
): ):
results = [] # Extract the tell position (slot identifier) and the list of pucks
tell = payload.tell
pucks = payload.pucks
# Retrieve all pucks in the database with their most recent try:
# `tell_position_set` event # Resolve slot ID from the provided identifier
all_pucks_with_last_event = ( slot_id = resolve_slot_id(tell)
db.query(PuckModel, PuckEventModel) except Exception as e:
.outerjoin(PuckEventModel, PuckEventModel.puck_id == PuckModel.id) raise HTTPException(
.order_by(PuckEventModel.puck_id, PuckEventModel.timestamp.desc()) status_code=400,
.all() detail=f"Invalid tell or slot identifier: {tell}." f" Error: {str(e)}",
) )
# Dictionary mapping each puck's ID to its latest event # Fetch existing pucks at the beamline slot
last_events = {} pucks_at_beamline = get_pucks_at_beamline(slot_id, db)
for puck, last_event in all_pucks_with_last_event: beamline_puck_map = {
if puck.id not in last_events: # Only store the latest event for each puck normalize_puck_name(puck.puck_name): puck for puck in pucks_at_beamline
last_events[puck.id] = last_event }
# Track processed puck IDs to avoid double-processing # Check if the payload has any pucks
processed_pucks = set() if not pucks: # Empty payload case
results = []
# Process pucks provided in the payload # Deduplicate pucks based on ID
for puck_data in pucks: unique_pucks_at_beamline = {
try: puck.id: puck for puck in pucks_at_beamline
# Extract data from input }.values()
puck_name = puck_data.puck_name
new_position = (
puck_data.tell_position
) # Combined from segment + puck_in_segment
normalized_name = normalize_puck_name(puck_name)
# Find puck in the database for puck in unique_pucks_at_beamline:
puck = ( # Fetch the most recent event for the puck
db.query(PuckModel) last_event = (
db.query(PuckEventModel)
.filter( .filter(
func.replace(func.upper(PuckModel.puck_name), "-", "") PuckEventModel.puck_id == puck.id,
== normalized_name
) )
.order_by(
PuckEventModel.id.desc()
) # Order by timestamp ensures we get the latest event
.first() .first()
) )
if not puck: # Log the last event for the puck
raise ValueError(f"Puck with name '{puck_name}' not found.") if last_event:
logger.info(
f"Processing puck: {puck.puck_name}, "
f"Last Event -> Type: {last_event.event_type}, "
f"Tell Position: {last_event.tell_position}, "
f"Timestamp: {last_event.timestamp}"
)
else:
logger.info(
f"Processing puck: {puck.puck_name}, No events found for this puck"
)
# Mark this puck as processed # Remove all pucks, including those without events or with None
processed_pucks.add(puck.id) # tell_position
if last_event.tell_position is not None:
try:
# Add a puck_removed event
remove_event = PuckEventModel(
puck_id=puck.id,
tell=None, # Nullify the `tell` for removal
tell_position=None,
event_type="puck_removed",
timestamp=datetime.utcnow(),
)
db.add(remove_event)
# Query the last event for this puck # Record this removal in the response
last_event = last_events.get(puck.id) results.append(
{
"puck_name": puck.puck_name,
"tell": tell,
"removed_position": last_event.tell_position
if last_event
else None,
"status": "removed",
"message": "Puck removed due to empty payload.",
}
)
except Exception as e:
# Handle and log the error for this particular puck
results.append(
{
"puck_name": puck.puck_name,
"error": str(e),
}
)
# Rule 1: Skip if the last event's `tell_position` matches the new position # Commit all removal events and return the results
if last_event and last_event.tell_position == new_position: db.commit()
return results
# If the payload contains pucks, continue with the regular logic
results = []
processed_pucks = (
set()
) # To track pucks that have been processed (unchanged or updated)
# Existing pucks' most recent events
last_events_map = {
puck.id: db.query(PuckEventModel)
.filter(
PuckEventModel.puck_id == puck.id,
PuckEventModel.event_type == "tell_position_set",
)
.order_by(PuckEventModel.timestamp.desc())
.first()
for puck in pucks_at_beamline
}
# Step 1: Process each puck in the payload
for puck_data in pucks:
try:
puck_name = puck_data.puck_name
normalized_name = normalize_puck_name(puck_name)
new_position = puck_data.tell_position
existing_puck = beamline_puck_map.get(normalized_name)
if not existing_puck:
# If the puck is not found, it's a potential error
results.append( results.append(
{ {
"puck_name": puck.puck_name, "puck_name": puck_name,
"current_position": new_position, "error": f"Puck '{puck_name}' not found at the beamline.",
"status": "unchanged",
"message": "No change in tell_position. No event created.",
} }
) )
continue continue
# Rule 2: Add a "puck_removed" event if the last tell_position is not None processed_pucks.add(existing_puck.id) # Mark this puck as processed
# Check if the tell position is unchanged
last_event = last_events_map.get(existing_puck.id)
if last_event and last_event.tell_position == new_position:
results.append(
{
"puck_name": puck_name,
"tell": tell,
"current_position": new_position,
"status": "unchanged",
"message": "No change in tell_position.",
}
)
continue
# Add a "puck_removed" event if the position is being changed (old
# position removed)
if last_event and last_event.tell_position is not None: if last_event and last_event.tell_position is not None:
remove_event = PuckEventModel( remove_event = PuckEventModel(
puck_id=puck.id, puck_id=existing_puck.id,
tell=None,
tell_position=None, tell_position=None,
event_type="puck_removed", # Event type set to "puck_removed" event_type="puck_removed",
timestamp=datetime.now(), timestamp=datetime.utcnow(),
) )
db.add(remove_event) db.add(remove_event)
# Add a new "tell_position_set" event # Add a new "tell_position_set" event (new position)
if new_position: new_event = PuckEventModel(
new_event = PuckEventModel( puck_id=existing_puck.id,
puck_id=puck.id, tell=tell,
tell_position=new_position, tell_position=new_position,
event_type="tell_position_set", event_type="tell_position_set",
timestamp=datetime.utcnow(),
)
db.add(new_event)
results.append(
{
"puck_name": puck.puck_name,
"new_position": new_position,
"previous_position": last_event.tell_position
if last_event
else None,
"status": "updated",
"message": "The tell_position was updated successfully.",
}
)
db.commit()
except Exception as e:
# Handle individual puck errors
results.append({"puck_name": puck_data.puck_name, "error": str(e)})
# Process pucks not included in the payload but present in the database
for puck_id, last_event in last_events.items():
# Skip pucks already processed in the previous loop
if puck_id in processed_pucks:
continue
puck = db.query(PuckModel).filter(PuckModel.id == puck_id).first()
if not puck:
continue
# Skip if the last event's tell_position is already null
if not last_event or last_event.tell_position is None:
continue
try:
# Add a "puck_removed" event
remove_event = PuckEventModel(
puck_id=puck.id,
tell_position=None,
event_type="puck_removed", # Event type set to "puck_removed"
timestamp=datetime.utcnow(), timestamp=datetime.utcnow(),
) )
db.add(remove_event) db.add(new_event)
results.append( results.append(
{ {
"puck_name": puck.puck_name, "puck_name": puck_name,
"removed_position": last_event.tell_position, "tell": tell,
"status": "removed", "new_position": new_position,
"message": "Puck is not in payload and" "previous_position": (
" has been marked as removed from tell_position.", last_event.tell_position if last_event else None
),
"status": "updated",
"message": "Tell position updated successfully.",
} }
) )
db.commit()
except Exception as e: except Exception as e:
# Handle errors for individual puck removal results.append(
results.append({"puck_name": puck.puck_name, "error": str(e)}) {
"puck_name": puck_name,
"error": str(e),
}
)
# Step 2: Handle "absent" pucks for removal
for puck in pucks_at_beamline:
if puck.id not in processed_pucks: # This puck was not in the payload
last_event = last_events_map.get(
puck.id
) # Fetch the last event for the puck
# Only remove pucks that have a valid last event with a non-null
# tell_position
if last_event and last_event.tell_position is not None:
try:
remove_event = PuckEventModel(
puck_id=puck.id,
tell=None,
tell_position=None,
event_type="puck_removed",
timestamp=datetime.utcnow(),
)
db.add(remove_event)
results.append(
{
"puck_name": puck.puck_name,
"tell": tell,
"removed_position": last_event.tell_position,
"status": "removed",
"message": "Puck removed from tell_position.",
}
)
except Exception as e:
results.append(
{
"puck_name": puck.puck_name,
"error": str(e),
}
)
# Step 3: Commit all changes to the DB
db.commit()
return results return results
@@ -193,13 +438,12 @@ async def set_tell_positions(
@router.get("/with-tell-position", response_model=List[PuckWithTellPosition])
async def get_pucks_with_tell_position(db: Session = Depends(get_db)):
    """
-    Retrieve all pucks with a `tell_position` set (not null),
+    Retrieve all pucks with a valid `tell_position` set (non-null),
    their associated samples, and the latest `tell_position` value (if any).
    Only include pucks when their latest event has a `tell_position`
-    set and matches "tell_position_set".
+    set and an `event_type` matching "tell_position_set".
    """
-    # Step 1: Prepare a subquery to fetch the latest event timestamp for each
-    # puck with a non-null tell_position
+    # Step 1: Prepare a subquery to fetch the latest event timestamp for each puck.
    latest_event_subquery = (
        db.query(
            PuckEventModel.puck_id,
@@ -209,7 +453,7 @@ async def get_pucks_with_tell_position(db: Session = Depends(get_db)):
        .subquery()
    )

-    # Step 2: Query the pucks and their latest `tell_position` by joining the subquery
+    # Step 2: Main query - fetch pucks with latest `tell_position` event details
    pucks_with_events = (
        db.query(PuckModel, PuckEventModel, DewarModel)
        .join(PuckEventModel, PuckModel.id == PuckEventModel.puck_id)
@@ -220,21 +464,23 @@ async def get_pucks_with_tell_position(db: Session = Depends(get_db)):
        )
        .outerjoin(
            DewarModel, PuckModel.dewar_id == DewarModel.id
-        )  # Outer join with DewarModel
-        .filter(
-            PuckEventModel.tell_position.isnot(None)
-        )  # Only include non-null `tell_position`
-        .filter(
-            PuckEventModel.event_type == "tell_position_set"
-        )  # Only include relevant event types
+        )  # Optional, include related dewar info
        .all()
    )

+    # Return an empty list if no relevant pucks are found
    if not pucks_with_events:
        return []

-    # Step 3: Construct the response with pucks and their latest tell_position
+    # Step 3: Construct the response with pucks and their valid tell_position
    results = []
+
+    # Debug output for verification
+    print(f"Pucks with Events and Dewars: {pucks_with_events}")
    for puck, event, dewar in pucks_with_events:
+        print(f"Puck: {puck}, Event: {event}, Dewar: {dewar}")
+        if event.tell_position is None:
+            continue
+
        # Fetch associated samples for this puck
        samples = db.query(SampleModel).filter(SampleModel.puck_id == puck.id).all()
@@ -252,8 +498,8 @@ async def get_pucks_with_tell_position(db: Session = Depends(get_db)):
                if dewar and dewar.dewar_name
                else None,
                pgroup=str(dewar.pgroups)
-                if dewar.pgroups
-                else None,  # will be replaced later by puck pgroup
+                if dewar and dewar.pgroups
+                else None,  # Replace later by puck pgroup if needed
                samples=[
                    Sample(
                        id=sample.id,
@@ -271,7 +517,7 @@ async def get_pucks_with_tell_position(db: Session = Depends(get_db)):
                    )
                    for sample in samples
                ],
-                tell_position=str(event.tell_position) if event else None,
+                tell_position=str(event.tell_position),
            )
        )
@ -361,163 +607,38 @@ async def get_last_tell_position(puck_id: str, db: Session = Depends(get_db)):
async def get_pucks_by_slot(slot_identifier: str, db: Session = Depends(get_db)): async def get_pucks_by_slot(slot_identifier: str, db: Session = Depends(get_db)):
""" """
Retrieve all pucks in a slot, reporting their latest event and Retrieve all pucks in a slot, reporting their latest event and
`tell_position` value. `tell_position` value.
Args:
slot_identifier (str): The slot identifier (e.g., "PXI", "48").
db (Session): Database session dependency.
Returns:
List[PuckWithTellPosition]: List of pucks in the specified slot.
""" """
# Map keywords to slot IDs # Resolve the slot identifier to a numeric slot ID using the function
slot_aliases = {
"PXI": 47,
"PXII": 48,
"PXIII": 49,
"X06SA": 47,
"X10SA": 48,
"X06DA": 49,
}
# Resolve slot ID or alias
try: try:
slot_id = int(slot_identifier) slot_id = resolve_slot_id(slot_identifier)
except ValueError: except HTTPException as e:
slot_id = slot_aliases.get(slot_identifier.upper()) logger.error(
if not slot_id: f"Failed to resolve slot identifier: {slot_identifier}. Error: {e.detail}"
logger.error(f"Invalid slot alias: {slot_identifier}") )
raise HTTPException( raise e
status_code=400, detail=f"Invalid slot identifier: {slot_identifier}"
)
logger.info(f"Resolved slot identifier: {slot_identifier} to Slot ID: {slot_id}") logger.info(f"Resolved slot identifier '{slot_identifier}' to Slot ID: {slot_id}")
# Verify slot existence # Fetch the pucks at the beamline for the resolved slot ID
slot = db.query(SlotModel).filter(SlotModel.id == slot_id).first() pucks = get_pucks_at_beamline(slot_id, db)
if not slot:
logger.error(f"Slot not found: {slot_identifier}") if not pucks:
logger.warning(
f"No pucks found for the slot '{slot_identifier}' (ID: {slot_id})"
)
raise HTTPException( raise HTTPException(
status_code=404, detail=f"Slot not found for identifier {slot_identifier}" status_code=404, detail=f"No pucks found for slot '{slot_identifier}'"
)
# Subquery to find the latest event for each dewar
latest_event_subquery = (
db.query(
LogisticsEventModel.dewar_id.label("dewar_id"),
func.max(LogisticsEventModel.timestamp).label("latest_event_time"),
)
.group_by(LogisticsEventModel.dewar_id)
.subquery(name="latest_event_subquery")
)
# Main query to fetch dewars where the latest event is "beamline"
dewars = (
db.query(DewarModel)
.join(LogisticsEventModel, DewarModel.id == LogisticsEventModel.dewar_id)
.join(
latest_event_subquery,
(LogisticsEventModel.dewar_id == latest_event_subquery.c.dewar_id)
& (
LogisticsEventModel.timestamp
== latest_event_subquery.c.latest_event_time
), # Match latest event
)
.filter(
LogisticsEventModel.slot_id == slot_id,
LogisticsEventModel.event_type
== "beamline", # Ensure latest event is "beamline"
)
.all()
)
if not dewars:
logger.warning(f"No dewars found for slot: {slot_identifier}")
raise HTTPException(
status_code=404, detail=f"No dewars found for slot {slot_identifier}"
) )
logger.info( logger.info(
f"Found dewars for slot {slot_identifier}: {[dewar.id for dewar in dewars]}" f"Found {len(pucks)} pucks for slot '{slot_identifier}' (ID: {slot_id})"
) )
dewar_ids = [dewar.id for dewar in dewars] return pucks
dewar_map = {dewar.id: dewar.dewar_name for dewar in dewars}
dewar_pgroups = {dewar.id: dewar.pgroups for dewar in dewars}
# Subquery to fetch the latest event for each puck (any type of event)
latest_event_subquery = (
db.query(
PuckEventModel.puck_id.label("puck_id"),
func.max(PuckEventModel.timestamp).label("latest_event_time"),
)
.group_by(PuckEventModel.puck_id)
.subquery(name="latest_event_subquery") # Explicitly name the subquery
)
# Main query to fetch pucks and their latest events
pucks_with_latest_events = (
db.query(
PuckModel,
PuckEventModel.event_type,
PuckEventModel.tell_position,
DewarModel, # Include DewarModel
)
.join( # Join pucks with the latest event
latest_event_subquery,
PuckModel.id == latest_event_subquery.c.puck_id,
isouter=True,
)
.join( # Fetch event details from the latest event timestamp
PuckEventModel,
(PuckEventModel.puck_id == latest_event_subquery.c.puck_id)
& (PuckEventModel.timestamp == latest_event_subquery.c.latest_event_time),
isouter=True,
)
.join( # Join with DewarModel to get dewar details
DewarModel,
PuckModel.dewar_id == DewarModel.id,
isouter=True,
)
.filter(PuckModel.dewar_id.in_(dewar_ids)) # Restrict pucks to relevant dewars
.all()
)
# Log the results of the query
logger.debug(f"Results from query (latest events): {pucks_with_latest_events}")
if not pucks_with_latest_events:
logger.warning(f"No pucks found for slot: {slot_identifier}")
raise HTTPException(
status_code=404, detail=f"No pucks found for slot {slot_identifier}"
)
# Prepare the final response
results = []
for puck, event_type, tell_position, dewar in pucks_with_latest_events:
logger.debug(
f"Puck ID: {puck.id}, Name: {puck.puck_name}, Event Type: {event_type}, "
f"Tell Position: {tell_position}"
)
dewar_name = dewar_map.get(puck.dewar_id, "Unknown")
pgroup = dewar_pgroups.get(
puck.dewar_id
) # will be replaced later by puck pgroup
# For pucks with no events or whose latest event is `puck_removed`, set
# `tell_position` to None
if event_type is None or event_type == "puck_removed":
tell_position = None
# Construct the response model
results.append(
PuckWithTellPosition(
id=puck.id,
pgroup=pgroup,
puck_name=puck.puck_name,
puck_type=puck.puck_type,
puck_location_in_dewar=int(puck.puck_location_in_dewar)
if puck.puck_location_in_dewar
else None,
dewar_id=puck.dewar_id,
dewar_name=dewar_name,
tell_position=tell_position,
)
)
logger.info(f"Final response for slot {slot_identifier}: {results}")
return results

View File

@@ -1,4 +1,4 @@
-from typing import List, Optional
+from typing import List, Optional, ClassVar
from datetime import datetime
from pydantic import BaseModel, EmailStr, constr, Field, field_validator
from datetime import date
@@ -668,6 +668,13 @@ class SetTellPosition(BaseModel):
        return None


+class SetTellPositionRequest(BaseModel):
+    tell: str
+    pucks: List[SetTellPosition]
+
+    from_attributes: ClassVar[bool] = True
+
+
class PuckWithTellPosition(BaseModel):
    id: int
    puck_name: str
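As a quick shape check of the new wrapper (not part of the commit), the two schema classes can be exercised standalone; this sketch uses a simplified stand-in for `SetTellPosition`, which in `schemas.py` carries more fields and validators:

    from typing import ClassVar, List
    from pydantic import BaseModel

    class SetTellPosition(BaseModel):  # simplified stand-in for the real schema
        puck_name: str
        segment: str
        puck_in_segment: int

    class SetTellPositionRequest(BaseModel):
        tell: str
        pucks: List[SetTellPosition]
        from_attributes: ClassVar[bool] = True  # ClassVar, so not treated as a model field

    req = SetTellPositionRequest(
        tell="X06DA",
        pucks=[SetTellPosition(puck_name="PSIMX074", segment="B", puck_in_segment=1)],
    )
    print(req.model_dump())  # pydantic v2; use .dict() on v1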

View File

@ -6,8 +6,8 @@
"metadata": { "metadata": {
"collapsed": true, "collapsed": true,
"ExecuteTime": { "ExecuteTime": {
"end_time": "2025-01-30T11:29:38.703954Z", "end_time": "2025-02-04T12:17:43.144287Z",
"start_time": "2025-01-30T11:29:38.307050Z" "start_time": "2025-02-04T12:17:43.141596Z"
} }
}, },
"source": [ "source": [
@ -47,13 +47,13 @@
] ]
} }
], ],
"execution_count": 1 "execution_count": 22
}, },
{ {
"metadata": { "metadata": {
"ExecuteTime": { "ExecuteTime": {
"end_time": "2025-01-30T11:07:14.795059Z", "end_time": "2025-02-03T08:48:55.604554Z",
"start_time": "2025-01-30T11:07:14.783786Z" "start_time": "2025-02-03T08:48:55.583427Z"
} }
}, },
"cell_type": "code", "cell_type": "code",
@ -92,32 +92,24 @@
"id": "45cc7ab6d4589711", "id": "45cc7ab6d4589711",
"outputs": [ "outputs": [
{ {
"name": "stdout", "ename": "AttributeError",
"output_type": "stream", "evalue": "'ShipmentsApi' object has no attribute 'fetch_shipments_shipments_get'",
"text": [ "output_type": "error",
"Exception when calling ShipmentsApi->fetch_shipments_shipments_get: (404)\n", "traceback": [
"Reason: Not Found\n", "\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
"HTTP response headers: HTTPHeaderDict({'date': 'Thu, 30 Jan 2025 11:07:14 GMT', 'server': 'uvicorn', 'content-length': '22', 'content-type': 'application/json'})\n", "\u001B[0;31mAttributeError\u001B[0m Traceback (most recent call last)",
"HTTP response body: {\"detail\":\"Not Found\"}\n", "Cell \u001B[0;32mIn[3], line 12\u001B[0m\n\u001B[1;32m 8\u001B[0m api_instance \u001B[38;5;241m=\u001B[39m aareDBclient\u001B[38;5;241m.\u001B[39mShipmentsApi(api_client)\n\u001B[1;32m 10\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[1;32m 11\u001B[0m \u001B[38;5;66;03m# Fetch all shipments\u001B[39;00m\n\u001B[0;32m---> 12\u001B[0m all_shipments_response \u001B[38;5;241m=\u001B[39m \u001B[43mapi_instance\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mfetch_shipments_shipments_get\u001B[49m()\n\u001B[1;32m 14\u001B[0m \u001B[38;5;66;03m# Print shipment names and their associated puck names\u001B[39;00m\n\u001B[1;32m 15\u001B[0m \u001B[38;5;28;01mfor\u001B[39;00m shipment \u001B[38;5;129;01min\u001B[39;00m all_shipments_response:\n",
"\n" "\u001B[0;31mAttributeError\u001B[0m: 'ShipmentsApi' object has no attribute 'fetch_shipments_shipments_get'"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/site-packages/urllib3/connectionpool.py:1097: InsecureRequestWarning: Unverified HTTPS request is being made to host '127.0.0.1'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings\n",
" warnings.warn(\n"
] ]
} }
], ],
"execution_count": 6 "execution_count": 3
}, },
{ {
"metadata": { "metadata": {
"ExecuteTime": { "ExecuteTime": {
"end_time": "2025-01-17T14:14:30.201369Z", "end_time": "2025-02-03T22:26:47.957072Z",
"start_time": "2025-01-17T14:14:30.175577Z" "start_time": "2025-02-03T22:26:47.935362Z"
} }
}, },
"cell_type": "code", "cell_type": "code",
@ -132,7 +124,7 @@
" try:\n", " try:\n",
" # Create payload using the required model\n", " # Create payload using the required model\n",
" logistics_event_create = LogisticsEventCreate(\n", " logistics_event_create = LogisticsEventCreate(\n",
" dewar_qr_code='15e3dbe05e78ee83',\n", " dewar_qr_code='923db239427869be',\n",
" location_qr_code='A2-X06SA',\n", " location_qr_code='A2-X06SA',\n",
" transaction_type='incoming',\n", " transaction_type='incoming',\n",
" timestamp=date.today() # Adjust if the API expects datetime\n", " timestamp=date.today() # Adjust if the API expects datetime\n",
@ -150,7 +142,7 @@
" try:\n", " try:\n",
" # Create payload using the required model\n", " # Create payload using the required model\n",
" logistics_event_create = LogisticsEventCreate(\n", " logistics_event_create = LogisticsEventCreate(\n",
" dewar_qr_code='15e3dbe05e78ee83',\n", " dewar_qr_code='923db239427869be',\n",
" location_qr_code='A2-X06SA',\n", " location_qr_code='A2-X06SA',\n",
" transaction_type='refill',\n", " transaction_type='refill',\n",
" timestamp=date.today() # Adjust if the API expects datetime\n", " timestamp=date.today() # Adjust if the API expects datetime\n",
@ -168,7 +160,7 @@
" try:\n", " try:\n",
" # Create payload using the required model\n", " # Create payload using the required model\n",
" logistics_event_create = LogisticsEventCreate(\n", " logistics_event_create = LogisticsEventCreate(\n",
" dewar_qr_code='15e3dbe05e78ee83',\n", " dewar_qr_code='923db239427869be',\n",
" location_qr_code='X06DA-Beamline',\n", " location_qr_code='X06DA-Beamline',\n",
" transaction_type='beamline',\n", " transaction_type='beamline',\n",
" timestamp=date.today() # Adjust if the API expects datetime\n", " timestamp=date.today() # Adjust if the API expects datetime\n",
@ -189,7 +181,11 @@
"name": "stdout", "name": "stdout",
"output_type": "stream", "output_type": "stream",
"text": [ "text": [
"API Response: {'message': 'Status updated successfully'}\n", "Exception when calling LogisticsApi->scan_dewar_logistics_dewar_scan_post: (400)\n",
"Reason: Bad Request\n",
"HTTP response headers: HTTPHeaderDict({'date': 'Mon, 03 Feb 2025 22:26:47 GMT', 'server': 'uvicorn', 'content-length': '47', 'content-type': 'application/json'})\n",
"HTTP response body: {\"detail\":\"Slot not found or already occupied\"}\n",
"\n",
"API Response: {'message': 'Status updated successfully'}\n", "API Response: {'message': 'Status updated successfully'}\n",
"API Response: {'message': 'Status updated successfully'}\n" "API Response: {'message': 'Status updated successfully'}\n"
] ]
@ -207,13 +203,13 @@
] ]
} }
], ],
"execution_count": 45 "execution_count": 4
}, },
{ {
"metadata": { "metadata": {
"ExecuteTime": { "ExecuteTime": {
"end_time": "2025-01-30T11:35:20.036682Z", "end_time": "2025-02-04T13:40:09.144335Z",
"start_time": "2025-01-30T11:35:20.018284Z" "start_time": "2025-02-04T13:40:09.125904Z"
} }
}, },
"cell_type": "code", "cell_type": "code",
@ -242,13 +238,13 @@
"text": [ "text": [
"The response of PucksApi->get_pucks_by_slot_pucks_slot_slot_identifier_get:\n", "The response of PucksApi->get_pucks_by_slot_pucks_slot_slot_identifier_get:\n",
"\n", "\n",
"[PuckWithTellPosition(id=1, puck_name='PUCK-001', puck_type='Unipuck', puck_location_in_dewar=1, dewar_id=1, dewar_name='Dewar One', pgroup='p20001', samples=None, tell_position=None),\n", "[PuckWithTellPosition(id=38, puck_name='PSIMX074', puck_type='unipuck', puck_location_in_dewar=1, dewar_id=7, dewar_name='31012025', pgroup='p20001', samples=None, tell_position=None),\n",
" PuckWithTellPosition(id=2, puck_name='PUCK002', puck_type='Unipuck', puck_location_in_dewar=2, dewar_id=1, dewar_name='Dewar One', pgroup='p20001', samples=None, tell_position=None),\n", " PuckWithTellPosition(id=39, puck_name='PSIMX080', puck_type='unipuck', puck_location_in_dewar=2, dewar_id=7, dewar_name='31012025', pgroup='p20001', samples=None, tell_position=None),\n",
" PuckWithTellPosition(id=3, puck_name='PUCK003', puck_type='Unipuck', puck_location_in_dewar=3, dewar_id=1, dewar_name='Dewar One', pgroup='p20001', samples=None, tell_position=None),\n", " PuckWithTellPosition(id=40, puck_name='PSIMX081', puck_type='unipuck', puck_location_in_dewar=3, dewar_id=7, dewar_name='31012025', pgroup='p20001', samples=None, tell_position=None),\n",
" PuckWithTellPosition(id=4, puck_name='PUCK004', puck_type='Unipuck', puck_location_in_dewar=4, dewar_id=1, dewar_name='Dewar One', pgroup='p20001', samples=None, tell_position=None),\n", " PuckWithTellPosition(id=41, puck_name='PSIMX084', puck_type='unipuck', puck_location_in_dewar=4, dewar_id=7, dewar_name='31012025', pgroup='p20001', samples=None, tell_position=None),\n",
" PuckWithTellPosition(id=5, puck_name='PUCK005', puck_type='Unipuck', puck_location_in_dewar=5, dewar_id=1, dewar_name='Dewar One', pgroup='p20001', samples=None, tell_position=None),\n", " PuckWithTellPosition(id=42, puck_name='PSIMX104', puck_type='unipuck', puck_location_in_dewar=5, dewar_id=7, dewar_name='31012025', pgroup='p20001', samples=None, tell_position=None),\n",
" PuckWithTellPosition(id=6, puck_name='PUCK006', puck_type='Unipuck', puck_location_in_dewar=6, dewar_id=1, dewar_name='Dewar One', pgroup='p20001', samples=None, tell_position=None),\n", " PuckWithTellPosition(id=43, puck_name='PSIMX107', puck_type='unipuck', puck_location_in_dewar=6, dewar_id=7, dewar_name='31012025', pgroup='p20001', samples=None, tell_position=None),\n",
" PuckWithTellPosition(id=7, puck_name='PUCK007', puck_type='Unipuck', puck_location_in_dewar=7, dewar_id=1, dewar_name='Dewar One', pgroup='p20001', samples=None, tell_position=None)]\n" " PuckWithTellPosition(id=44, puck_name='PSIMX117', puck_type='unipuck', puck_location_in_dewar=7, dewar_id=7, dewar_name='31012025', pgroup='p20001', samples=None, tell_position=None)]\n"
] ]
}, },
{ {
@ -260,40 +256,57 @@
] ]
} }
], ],
"execution_count": 5 "execution_count": 52
}, },
{ {
"metadata": { "metadata": {
"ExecuteTime": { "ExecuteTime": {
"end_time": "2025-01-30T12:30:46.242711Z", "end_time": "2025-02-04T13:40:49.933951Z",
"start_time": "2025-01-30T12:30:46.215343Z" "start_time": "2025-02-04T13:40:49.910479Z"
} }
}, },
"cell_type": "code", "cell_type": "code",
"source": [ "source": [
"from aareDBclient import SetTellPosition\n", "from aareDBclient import SetTellPosition, SetTellPositionRequest\n",
"\n",
"# Attribute a puck to a position in the TELL dewar\n",
"\n", "\n",
"with aareDBclient.ApiClient(configuration) as api_client:\n", "with aareDBclient.ApiClient(configuration) as api_client:\n",
" # Create an instance of the API class\n", " # Create an instance of the API class\n",
" api_instance = aareDBclient.PucksApi(api_client)\n", " api_instance = aareDBclient.PucksApi(api_client)\n",
" get_pucks_at_beamline = aareDBclient.PucksApi(api_client)\n",
"\n", "\n",
" # This part is commented but will be used to attribute a puck to a position of the TELL\n", " # Payload with SetTellPosition objects\n",
" # Define the puck ID and payload\n", " payload = SetTellPositionRequest(\n",
"\n", " tell=\"X06DA\",\n",
" payload = [SetTellPosition(puck_name='PUCK006', segment='A', puck_in_segment=2),SetTellPosition(puck_name='PUCK005', segment='C', puck_in_segment=3)]\n", " pucks=[\n",
" #payload = []\n", " SetTellPosition(puck_name='PSIMX074', segment='B', puck_in_segment=1),\n",
" SetTellPosition(puck_name='PSIMX080', segment='B', puck_in_segment=2),\n",
" SetTellPosition(puck_name='PSIMX081', segment='C', puck_in_segment=3),\n",
" SetTellPosition(puck_name='PSIMX084', segment='C', puck_in_segment=4),\n",
" SetTellPosition(puck_name='PSIMX104', segment='E', puck_in_segment=5),\n",
" SetTellPosition(puck_name='PSIMX107', segment='E', puck_in_segment=1),\n",
" SetTellPosition(puck_name='PSIMX117', segment='F', puck_in_segment=2),\n",
" ]\n",
" #pucks=[\n",
" # SetTellPosition(puck_name='PSIMX074', segment='F', puck_in_segment=1),\n",
" # SetTellPosition(puck_name='PSIMX080', segment='F', puck_in_segment=2),\n",
" # SetTellPosition(puck_name='PSIMX081', segment='F', puck_in_segment=3),\n",
" # SetTellPosition(puck_name='PSIMX084', segment='F', puck_in_segment=4),\n",
" # SetTellPosition(puck_name='PSIMX107', segment='A', puck_in_segment=1),\n",
" # SetTellPosition(puck_name='PSIMX117', segment='A', puck_in_segment=2),\n",
" #]\n",
" #pucks = []\n",
" )\n",
"\n", "\n",
" # Call the PUT method to update the tell_position\n",
" try:\n", " try:\n",
" # Call the PUT method to update the tell_position\n", " api_response = api_instance.set_tell_positions_pucks_set_tell_positions_put(\n",
" api_response = api_instance.set_tell_positions_pucks_set_tell_positions_put(payload)\n", " set_tell_position_request=payload\n",
" ) # Pass the entire payload as a single parameter\n",
"\n",
" print(\"The response of PucksApi->pucks_puck_id_tell_position_put:\\n\")\n", " print(\"The response of PucksApi->pucks_puck_id_tell_position_put:\\n\")\n",
" pprint(api_response)\n", " pprint(api_response)\n",
"\n", "\n",
" except ApiException as e:\n", " except Exception as e:\n",
" print(\"Exception when calling PucksApi->pucks_puck_id_tell_position_put: %s\\n\" % e)" " print(f\"Exception when calling PucksApi: {e}\")\n"
], ],
"id": "d52d12287dd63299", "id": "d52d12287dd63299",
"outputs": [ "outputs": [
@ -303,16 +316,48 @@
"text": [ "text": [
"The response of PucksApi->pucks_puck_id_tell_position_put:\n", "The response of PucksApi->pucks_puck_id_tell_position_put:\n",
"\n", "\n",
"[{'message': 'The tell_position was updated successfully.',\n", "[{'message': 'Tell position updated successfully.',\n",
" 'new_position': 'A2',\n", " 'new_position': 'B1',\n",
" 'previous_position': None,\n", " 'previous_position': None,\n",
" 'puck_name': 'PUCK006',\n", " 'puck_name': 'PSIMX074',\n",
" 'status': 'updated'},\n", " 'status': 'updated',\n",
" {'message': 'The tell_position was updated successfully.',\n", " 'tell': 'X06DA'},\n",
" {'message': 'Tell position updated successfully.',\n",
" 'new_position': 'B2',\n",
" 'previous_position': None,\n",
" 'puck_name': 'PSIMX080',\n",
" 'status': 'updated',\n",
" 'tell': 'X06DA'},\n",
" {'message': 'Tell position updated successfully.',\n",
" 'new_position': 'C3',\n", " 'new_position': 'C3',\n",
" 'previous_position': None,\n", " 'previous_position': None,\n",
" 'puck_name': 'PUCK005',\n", " 'puck_name': 'PSIMX081',\n",
" 'status': 'updated'}]\n" " 'status': 'updated',\n",
" 'tell': 'X06DA'},\n",
" {'message': 'Tell position updated successfully.',\n",
" 'new_position': 'C4',\n",
" 'previous_position': None,\n",
" 'puck_name': 'PSIMX084',\n",
" 'status': 'updated',\n",
" 'tell': 'X06DA'},\n",
" {'message': 'Tell position updated successfully.',\n",
" 'new_position': 'E5',\n",
" 'previous_position': None,\n",
" 'puck_name': 'PSIMX104',\n",
" 'status': 'updated',\n",
" 'tell': 'X06DA'},\n",
" {'message': 'Tell position updated successfully.',\n",
" 'new_position': 'E1',\n",
" 'previous_position': None,\n",
" 'puck_name': 'PSIMX107',\n",
" 'status': 'updated',\n",
" 'tell': 'X06DA'},\n",
" {'message': 'Tell position updated successfully.',\n",
" 'new_position': 'F2',\n",
" 'previous_position': None,\n",
" 'puck_name': 'PSIMX117',\n",
" 'status': 'updated',\n",
" 'tell': 'X06DA'}]\n"
] ]
}, },
{ {
@ -324,13 +369,13 @@
] ]
} }
], ],
"execution_count": 8 "execution_count": 55
}, },
{ {
"metadata": { "metadata": {
"ExecuteTime": { "ExecuteTime": {
"end_time": "2025-01-30T12:35:40.734188Z", "end_time": "2025-02-04T13:36:46.598976Z",
"start_time": "2025-01-30T12:35:40.679071Z" "start_time": "2025-02-04T13:36:46.568865Z"
} }
}, },
"cell_type": "code", "cell_type": "code",
@ -359,11 +404,11 @@
" print(f\"Puck ID: {p.id}, Puck Name: {p.puck_name}\")\n", " print(f\"Puck ID: {p.id}, Puck Name: {p.puck_name}\")\n",
"\n", "\n",
" # Check if the puck has any samples\n", " # Check if the puck has any samples\n",
" if hasattr(p, 'samples') and p.samples: # Ensure 'samples' attribute exists and is not empty\n", " #if hasattr(p, 'samples') and p.samples: # Ensure 'samples' attribute exists and is not empty\n",
" for sample in p.samples:\n", " # for sample in p.samples:\n",
" print(f\" Sample ID: {sample.id}, Sample Name: {sample.sample_name}\")\n", " # print(f\" Sample ID: {sample.id}, Sample Name: {sample.sample_name}, Position: {sample.position}, Mount count: {sample.mount_count}\")\n",
" else:\n", " #else:\n",
" print(\" No samples found in this puck.\")\n", " # print(\" No samples found in this puck.\")\n",
"\n", "\n",
" except ApiException as e:\n", " except ApiException as e:\n",
" print(\"Exception when calling PucksApi->get_all_pucks_in_tell: %s\\n\" % e)" " print(\"Exception when calling PucksApi->get_all_pucks_in_tell: %s\\n\" % e)"
@ -374,28 +419,12 @@
"name": "stdout", "name": "stdout",
"output_type": "stream", "output_type": "stream",
"text": [ "text": [
"Puck ID: 6, Puck Name: PUCK006\n", "Puck ID: 31, Puck Name: PSIMX074\n",
" Sample ID: 28, Sample Name: Sample028\n", "Puck ID: 32, Puck Name: PSIMX080\n",
" Sample ID: 29, Sample Name: Sample029\n", "Puck ID: 33, Puck Name: PSIMX081\n",
" Sample ID: 30, Sample Name: Sample030\n", "Puck ID: 34, Puck Name: PSIMX084\n",
" Sample ID: 31, Sample Name: Sample031\n", "Puck ID: 36, Puck Name: PSIMX107\n",
" Sample ID: 32, Sample Name: Sample032\n", "Puck ID: 37, Puck Name: PSIMX117\n"
" Sample ID: 33, Sample Name: Sample033\n",
" Sample ID: 34, Sample Name: Sample034\n",
" Sample ID: 35, Sample Name: Sample035\n",
" Sample ID: 36, Sample Name: Sample036\n",
" Sample ID: 37, Sample Name: Sample037\n",
" Sample ID: 38, Sample Name: Sample038\n",
" Sample ID: 39, Sample Name: Sample039\n",
" Sample ID: 40, Sample Name: Sample040\n",
"Puck ID: 5, Puck Name: PUCK005\n",
" Sample ID: 21, Sample Name: Sample021\n",
" Sample ID: 22, Sample Name: Sample022\n",
" Sample ID: 23, Sample Name: Sample023\n",
" Sample ID: 24, Sample Name: Sample024\n",
" Sample ID: 25, Sample Name: Sample025\n",
" Sample ID: 26, Sample Name: Sample026\n",
" Sample ID: 27, Sample Name: Sample027\n"
] ]
}, },
{ {
@ -407,13 +436,13 @@
] ]
} }
], ],
"execution_count": 10 "execution_count": 49
}, },
{ {
"metadata": { "metadata": {
"ExecuteTime": { "ExecuteTime": {
"end_time": "2025-01-30T12:36:50.600728Z", "end_time": "2025-01-31T13:46:18.354067Z",
"start_time": "2025-01-30T12:36:50.581752Z" "start_time": "2025-01-31T13:46:18.332891Z"
} }
}, },
"cell_type": "code", "cell_type": "code",
@ -429,8 +458,8 @@
" try:\n", " try:\n",
" # Define the payload with only `event_type`\n", " # Define the payload with only `event_type`\n",
" sample_event_create = SampleEventCreate(\n", " sample_event_create = SampleEventCreate(\n",
" sample_id=27,\n", " sample_id=28,\n",
" event_type=\"Unmounted\" # Valid event type\n", " event_type=\"Mounted\" # Valid event type\n",
" )\n", " )\n",
"\n", "\n",
" # Debug the payload before sending\n", " # Debug the payload before sending\n",
@ -439,12 +468,15 @@
"\n", "\n",
" # Call the API\n", " # Call the API\n",
" api_response = api_instance.create_sample_event_samples_samples_sample_id_events_post(\n", " api_response = api_instance.create_sample_event_samples_samples_sample_id_events_post(\n",
" sample_id=27, # Ensure this matches a valid sample ID in the database\n", " sample_id=28, # Ensure this matches a valid sample ID in the database\n",
" sample_event_create=sample_event_create\n", " sample_event_create=sample_event_create\n",
" )\n", " )\n",
"\n", "\n",
" print(\"API response:\")\n", " print(\"API response:\")\n",
" pprint(api_response)\n", " #pprint(api_response)\n",
"\n",
" for p in api_response:\n",
" print(p)\n",
"\n", "\n",
" except ApiException as e:\n", " except ApiException as e:\n",
" print(\"Exception when calling post_sample_event:\")\n", " print(\"Exception when calling post_sample_event:\")\n",
@ -459,9 +491,21 @@
"output_type": "stream", "output_type": "stream",
"text": [ "text": [
"Payload being sent to API:\n", "Payload being sent to API:\n",
"{\"event_type\":\"Unmounted\"}\n", "{\"event_type\":\"Mounted\"}\n",
"API response:\n", "API response:\n",
"Sample(id=27, sample_name='Sample027', position=15, puck_id=5, crystalname=None, proteinname=None, positioninpuck=None, priority=None, comments=None, data_collection_parameters=None, events=[SampleEventResponse(id=406, sample_id=27, event_type='Mounted', timestamp=datetime.datetime(2025, 1, 30, 13, 36, 34)), SampleEventResponse(id=407, sample_id=27, event_type='Unmounted', timestamp=datetime.datetime(2025, 1, 30, 13, 36, 51))], mount_count=1, unmount_count=1)\n" "('id', 28)\n",
"('sample_name', 'Sample028')\n",
"('position', 1)\n",
"('puck_id', 6)\n",
"('crystalname', None)\n",
"('proteinname', None)\n",
"('positioninpuck', None)\n",
"('priority', None)\n",
"('comments', None)\n",
"('data_collection_parameters', None)\n",
"('events', [SampleEventResponse(id=37, sample_id=28, event_type='Mounted', timestamp=datetime.datetime(2025, 1, 29, 14, 3)), SampleEventResponse(id=38, sample_id=28, event_type='Unmounted', timestamp=datetime.datetime(2025, 1, 29, 14, 3, 50)), SampleEventResponse(id=408, sample_id=28, event_type='Mounted', timestamp=datetime.datetime(2025, 1, 31, 13, 10, 3)), SampleEventResponse(id=409, sample_id=28, event_type='Unmounted', timestamp=datetime.datetime(2025, 1, 31, 13, 12, 35)), SampleEventResponse(id=410, sample_id=28, event_type='Mounted', timestamp=datetime.datetime(2025, 1, 31, 13, 16, 55)), SampleEventResponse(id=411, sample_id=28, event_type='Unmounted', timestamp=datetime.datetime(2025, 1, 31, 13, 17, 8)), SampleEventResponse(id=412, sample_id=28, event_type='Mounted', timestamp=datetime.datetime(2025, 1, 31, 14, 46, 18))])\n",
"('mount_count', 4)\n",
"('unmount_count', 3)\n"
] ]
}, },
{ {
@ -473,25 +517,25 @@
] ]
} }
], ],
"execution_count": 13 "execution_count": 11
}, },
{ {
"metadata": { "metadata": {
"ExecuteTime": { "ExecuteTime": {
"end_time": "2025-01-30T12:37:14.520342Z", "end_time": "2025-01-31T12:06:44.184990Z",
"start_time": "2025-01-30T12:37:14.508460Z" "start_time": "2025-01-31T12:06:44.174766Z"
} }
}, },
"cell_type": "code", "cell_type": "code",
"source": [ "source": [
"\n", "### not working\n",
"with aareDBclient.ApiClient(configuration) as api_client:\n", "with aareDBclient.ApiClient(configuration) as api_client:\n",
" # Create an instance of the Samples API class\n", " # Create an instance of the Samples API class\n",
" api_instance = aareDBclient.SamplesApi(api_client)\n", " api_instance = aareDBclient.SamplesApi(api_client)\n",
"\n", "\n",
" try:\n", " try:\n",
" # Get the last sample event\n", " # Get the last sample event\n",
" last_event_response = api_instance.get_last_sample_event_samples_samples_sample_id_events_last_get(14)\n", " last_event_response = api_instance.get_last_sample_event_samples_samples_sample_id_events_last_get(27)\n",
" print(\"The response of get_last_sample_event:\\n\")\n", " print(\"The response of get_last_sample_event:\\n\")\n",
" pprint(last_event_response)\n", " pprint(last_event_response)\n",
"\n", "\n",
@ -507,12 +551,12 @@
"traceback": [ "traceback": [
"\u001B[0;31m---------------------------------------------------------------------------\u001B[0m", "\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
"\u001B[0;31mAttributeError\u001B[0m Traceback (most recent call last)", "\u001B[0;31mAttributeError\u001B[0m Traceback (most recent call last)",
"Cell \u001B[0;32mIn[14], line 7\u001B[0m\n\u001B[1;32m 3\u001B[0m api_instance \u001B[38;5;241m=\u001B[39m aareDBclient\u001B[38;5;241m.\u001B[39mSamplesApi(api_client)\n\u001B[1;32m 5\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[1;32m 6\u001B[0m \u001B[38;5;66;03m# Get the last sample event\u001B[39;00m\n\u001B[0;32m----> 7\u001B[0m last_event_response \u001B[38;5;241m=\u001B[39m \u001B[43mapi_instance\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mget_last_sample_event_samples_samples_sample_id_events_last_get\u001B[49m(\u001B[38;5;241m14\u001B[39m)\n\u001B[1;32m 8\u001B[0m \u001B[38;5;28mprint\u001B[39m(\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mThe response of get_last_sample_event:\u001B[39m\u001B[38;5;130;01m\\n\u001B[39;00m\u001B[38;5;124m\"\u001B[39m)\n\u001B[1;32m 9\u001B[0m pprint(last_event_response)\n", "Cell \u001B[0;32mIn[6], line 7\u001B[0m\n\u001B[1;32m 3\u001B[0m api_instance \u001B[38;5;241m=\u001B[39m aareDBclient\u001B[38;5;241m.\u001B[39mSamplesApi(api_client)\n\u001B[1;32m 5\u001B[0m \u001B[38;5;28;01mtry\u001B[39;00m:\n\u001B[1;32m 6\u001B[0m \u001B[38;5;66;03m# Get the last sample event\u001B[39;00m\n\u001B[0;32m----> 7\u001B[0m last_event_response \u001B[38;5;241m=\u001B[39m \u001B[43mapi_instance\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mget_last_sample_event_samples_samples_sample_id_events_last_get\u001B[49m(\u001B[38;5;241m27\u001B[39m)\n\u001B[1;32m 8\u001B[0m \u001B[38;5;28mprint\u001B[39m(\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mThe response of get_last_sample_event:\u001B[39m\u001B[38;5;130;01m\\n\u001B[39;00m\u001B[38;5;124m\"\u001B[39m)\n\u001B[1;32m 9\u001B[0m pprint(last_event_response)\n",
"\u001B[0;31mAttributeError\u001B[0m: 'SamplesApi' object has no attribute 'get_last_sample_event_samples_samples_sample_id_events_last_get'" "\u001B[0;31mAttributeError\u001B[0m: 'SamplesApi' object has no attribute 'get_last_sample_event_samples_samples_sample_id_events_last_get'"
] ]
} }
], ],
"execution_count": 14 "execution_count": 6
}, },
{ {
"metadata": { "metadata": {